diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 14d37427e1fd..6ae7b95698f1 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python-mono-repo:latest - digest: sha256:9f9050e25149b9ecf6c41c5b07924982fe6ef026ce3317afe1f4df4279e8e4db + digest: sha256:2429ab9ff5a11e75f63d1f440523f79b6d246815759f3c3185fbb36b6291b357 diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 5e2613ca063d..c9b8a36f766d 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -37,4 +37,13 @@ before_action { env_vars: { key: "SECRET_MANAGER_KEYS" value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file +} + +# Store the packages we uploaded to PyPI. That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. +action { + define_artifacts { + regex: "github/google-cloud-python/**/*.tar.gz" + strip_prefix: "github/google-cloud-python" + } +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000000..5405cc8ff1f3 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
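+#
+# Typical local usage (a sketch; assumes pre-commit has been installed with
+# `pip install pre-commit`): run `pre-commit install` once in the repo root so
+# the hooks below run automatically on every `git commit`.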
+# +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black +- repo: https://github.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index d20e44997308..0dc80f028f46 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,11 +1,27 @@ { - "packages/google-cloud-contentwarehouse": "0.3.1", - "packages/google-cloud-discoveryengine": "0.3.1", - "packages/google-cloud-enterpriseknowledgegraph": "0.3.1", - "packages/google-geo-type": "0.3.0", - "packages/google-maps-addressvalidation": "0.2.1", - "packages/google-maps-routing": "0.3.1", - "packages/google-apps-script-type": "0.3.1", - "packages/google-cloud-vmwareengine": "0.2.2", - "packages/google-cloud-datacatalog-lineage": "0.2.1" -} \ No newline at end of file + "packages/google-ai-generativelanguage": "0.2.1", + "packages/google-apps-script-type": "0.3.2", + "packages/google-area120-tables": "0.11.2", + "packages/google-cloud-advisorynotifications": "0.2.0", + "packages/google-cloud-alloydb": "0.1.1", + "packages/google-cloud-apigee-connect": "1.7.1", + "packages/google-cloud-bigquery-biglake": "0.3.0", + "packages/google-cloud-confidentialcomputing": "0.1.0", + "packages/google-cloud-contentwarehouse": "0.5.0", + "packages/google-cloud-datacatalog-lineage": "0.2.2", + "packages/google-cloud-discoveryengine": "0.8.0", + "packages/google-cloud-domains": "1.5.1", + "packages/google-cloud-enterpriseknowledgegraph": "0.3.2", + "packages/google-cloud-iap": "1.10.1", + "packages/google-cloud-ids": "1.5.1", + "packages/google-cloud-rapidmigrationassessment": "0.1.0", + "packages/google-cloud-storageinsights": "0.1.0", + "packages/google-cloud-support": "0.1.0", + "packages/google-cloud-vmwareengine": "1.0.1", + "packages/google-cloud-workstations": "0.4.0", + "packages/google-geo-type": "0.3.1", + "packages/google-maps-addressvalidation": "0.3.4", + "packages/google-maps-mapsplatformdatasets": "0.3.0", + "packages/google-maps-places": "0.1.1", + "packages/google-maps-routing": "0.5.1" +} diff --git a/README.rst b/README.rst index 85783d36214e..21a6f02b9b21 100644 --- a/README.rst +++ b/README.rst @@ -484,6 +484,9 @@ Libraries * - `Distributed Edge Container `_ - |preview| - |PyPI-google-cloud-edgecontainer| + * - `Document AI Toolbox `_ + - |preview| + - |PyPI-google-cloud-documentai-toolbox| * - `Error Reporting `_ - |preview| - |PyPI-google-cloud-error-reporting| @@ -809,6 +812,8 @@ Libraries :target: https://pypi.org/project/google-cloud-dataform .. |PyPI-google-cloud-edgecontainer| image:: https://img.shields.io/pypi/v/google-cloud-edgecontainer.svg :target: https://pypi.org/project/google-cloud-edgecontainer +.. |PyPI-google-cloud-documentai-toolbox| image:: https://img.shields.io/pypi/v/google-cloud-documentai-toolbox.svg + :target: https://pypi.org/project/google-cloud-documentai-toolbox .. |PyPI-google-cloud-error-reporting| image:: https://img.shields.io/pypi/v/google-cloud-error-reporting.svg :target: https://pypi.org/project/google-cloud-error-reporting .. 
|PyPI-google-cloud-eventarc-publishing| image:: https://img.shields.io/pypi/v/google-cloud-eventarc-publishing.svg diff --git a/containers/python-bootstrap-container/Dockerfile b/containers/python-bootstrap-container/Dockerfile index a206925e5346..a05dab0ab6bd 100644 --- a/containers/python-bootstrap-container/Dockerfile +++ b/containers/python-bootstrap-container/Dockerfile @@ -14,7 +14,7 @@ # Use a multi-stage docker build to limit production dependencies. -FROM python:3.11.1-buster +FROM python:3.11.3-buster RUN apt update diff --git a/packages/google-ai-generativelanguage/.OwlBot.yaml b/packages/google-ai-generativelanguage/.OwlBot.yaml new file mode 100644 index 000000000000..11bf98c5a606 --- /dev/null +++ b/packages/google-ai-generativelanguage/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/ai/generativelanguage/(.*)/.*-py + dest: /owl-bot-staging/google-ai-generativelanguage/$1 diff --git a/packages/google-ai-generativelanguage/.coveragerc b/packages/google-ai-generativelanguage/.coveragerc new file mode 100644 index 000000000000..fd060ae956b5 --- /dev/null +++ b/packages/google-ai-generativelanguage/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/ai/generativelanguage/__init__.py + google/ai/generativelanguage/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-ai-generativelanguage/.flake8 b/packages/google-ai-generativelanguage/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-ai-generativelanguage/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-ai-generativelanguage/.gitignore b/packages/google-ai-generativelanguage/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-ai-generativelanguage/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-ai-generativelanguage/.repo-metadata.json b/packages/google-ai-generativelanguage/.repo-metadata.json new file mode 100644 index 000000000000..eb86a6058cd6 --- /dev/null +++ b/packages/google-ai-generativelanguage/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "generativelanguage", + "name_pretty": "Generative Language API", + "api_description": "Generative Language API", + "product_documentation": "https://developers.generativeai.google/", + "client_documentation": "https://googleapis.dev/python/generativelanguage/latest", + "issue_tracker": "https://github.com/google/generative-ai-python/issues/new", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-ai-generativelanguage", + "api_id": "generativelanguage.googleapis.com", + "default_version": "v1beta2", + "codeowner_team": "", + "api_shortname": "generativelanguage" +} diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md new file mode 100644 index 000000000000..e6c0ae66f84e --- /dev/null +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -0,0 +1,24 @@ +# Changelog + +## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.2.0...google-ai-generativelanguage-v0.2.1) (2023-06-03) + + +### Documentation + +* fix broken client library documentation links ([#11192](https://github.com/googleapis/google-cloud-python/issues/11192)) ([5e17f7a](https://github.com/googleapis/google-cloud-python/commit/5e17f7a901bbbae8ff9a44ed62f1abd2386da2c8)) + +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.1.0...google-ai-generativelanguage-v0.2.0) (2023-05-05) + + +### Features + +* Add safety settings ([#11148](https://github.com/googleapis/google-cloud-python/issues/11148)) ([f9544c9](https://github.com/googleapis/google-cloud-python/commit/f9544c9897dd4d010c5b8703c744d8f28ae3b070)) + +## 0.1.0 (2023-05-02) + + +### Features + +* add initial files for google.ai.generativelanguage.v1beta2 ([#11142](https://github.com/googleapis/google-cloud-python/issues/11142)) ([54363fd](https://github.com/googleapis/google-cloud-python/commit/54363fd60decdecb05302fc9bce8e278eb39951e)) + +## Changelog diff --git a/packages/google-ai-generativelanguage/CODE_OF_CONDUCT.md b/packages/google-ai-generativelanguage/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null 
+++ b/packages/google-ai-generativelanguage/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. 
It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-ai-generativelanguage/CONTRIBUTING.rst b/packages/google-ai-generativelanguage/CONTRIBUTING.rst new file mode 100644 index 000000000000..f4b00077dd79 --- /dev/null +++ b/packages/google-ai-generativelanguage/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. 
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+   $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+    # Run all system tests
+    $ nox -s system
+
+    # Run a single system test
+    $ nox -s system-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    System tests are only configured to run under Python 3.
+    For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_environments>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production#manually>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the ``samples/`` folder. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own ``noxfile.py`` script
+which automates testing. If you decide to create a new folder, you can
+base it on the ``samples/snippets`` folder (providing ``noxfile.py`` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+    # Run all tests in a folder
+    $ cd samples/snippets
+    $ nox -s py-3.8
+
+    # Run a single sample test
+    $ cd samples/snippets
+    $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-ai-generativelanguage
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+We also explicitly decided to support Python 3 beginning with version 3.7.
+Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+  works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
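+
+For example, to guard an application against breaking changes from a package
+that is still in major version zero, pin an exact release in its requirements
+file (a sketch; the version shown is the current release recorded in this
+repository's manifest)::
+
+    google-ai-generativelanguage==0.2.1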
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-ai-generativelanguage/LICENSE b/packages/google-ai-generativelanguage/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-ai-generativelanguage/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner.
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-ai-generativelanguage/MANIFEST.in b/packages/google-ai-generativelanguage/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-ai-generativelanguage/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-ai-generativelanguage/README.rst b/packages/google-ai-generativelanguage/README.rst new file mode 100644 index 000000000000..94ee778b3480 --- /dev/null +++ b/packages/google-ai-generativelanguage/README.rst @@ -0,0 +1,107 @@ +Python Client for Generative Language API +========================================= + +|preview| |pypi| |versions| + +`Generative Language API`_: Generative Language API + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-ai-generativelanguage.svg + :target: https://pypi.org/project/google-ai-generativelanguage/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-ai-generativelanguage.svg + :target: https://pypi.org/project/google-ai-generativelanguage/ +.. _Generative Language API: https://developers.generativeai.google/ +.. _Client Library Documentation: https://googleapis.dev/python/generativelanguage/latest +.. 
_Product Documentation: https://developers.generativeai.google/

Quick Start
-----------

In order to use this library, you first need to go through the following steps:

1. `Select or create a Cloud Platform project.`_
2. `Enable billing for your project.`_
3. `Enable the Generative Language API.`_
4. `Setup Authentication.`_

.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
.. _Enable the Generative Language API.: https://developers.generativeai.google/
.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html

Installation
~~~~~~~~~~~~

Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
create isolated Python environments. The basic problem it addresses is one of
dependencies and versions, and indirectly permissions.

With `virtualenv`_, it's possible to install this library without needing system
install permissions, and without clashing with the installed system
dependencies.

.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/


Code samples and snippets
~~~~~~~~~~~~~~~~~~~~~~~~~

Code samples and snippets live in the ``samples/`` folder.


Supported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^
Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
Python.

Python >= 3.7

.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches

Unsupported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Python <= 3.6

If you are using an `end-of-life`_
version of Python, we recommend that you update as soon as possible to an actively supported version.

.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches

Mac/Linux
^^^^^^^^^

.. code-block:: console

    pip install virtualenv
    virtualenv <your-env>
    source <your-env>/bin/activate
    <your-env>/bin/pip install google-ai-generativelanguage


Windows
^^^^^^^

.. code-block:: console

    pip install virtualenv
    virtualenv <your-env>
    <your-env>\Scripts\activate
    <your-env>\Scripts\pip.exe install google-ai-generativelanguage

Next Steps
~~~~~~~~~~

- Read the `Client Library Documentation`_ for Generative Language API
  to see other available methods on the client.
- Read the `Generative Language API Product documentation`_ to learn
  more about the product and see How-to Guides.
- View this `README`_ to see the full list of Cloud
  APIs that we cover.

.. _Generative Language API Product documentation: https://developers.generativeai.google/
.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-ai-generativelanguage/SECURITY.md b/packages/google-ai-generativelanguage/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-ai-generativelanguage/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
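The README above covers installation but stops short of a first call. As a
minimal sketch of the v1beta2 surface added in this diff, the following
assumes credentials are already configured for the environment and uses an
illustrative model name:

.. code-block:: python

    from google.ai import generativelanguage_v1beta2 as glm

    # The client picks up credentials from the environment by default.
    client = glm.TextServiceClient()

    # `model` and `prompt` are flattened fields of GenerateTextRequest;
    # the model name here is illustrative.
    response = client.generate_text(
        model="models/text-bison-001",
        prompt=glm.TextPrompt(text="Write a one-sentence summary of nox."),
    )

    # Each candidate is a TextCompletion; its generated text is in `output`.
    print(response.candidates[0].output)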
diff --git a/packages/google-ai-generativelanguage/docs/CHANGELOG.md b/packages/google-ai-generativelanguage/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-ai-generativelanguage/docs/README.rst b/packages/google-ai-generativelanguage/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-ai-generativelanguage/docs/_static/custom.css b/packages/google-ai-generativelanguage/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-ai-generativelanguage/docs/_templates/layout.html b/packages/google-ai-generativelanguage/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+      {%- block relbar_top %}
+      {%- if theme_show_relbar_top|tobool %}
+        <div class="related top">
+          &nbsp;
+          {{- rellink_markup () }}
+        </div>
+      {%- endif %}
+      {% endblock %}
+
+        <div class="body" role="main">
+          <div id="python2-eol" class="admonition">
+          As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+          Library versions released prior to that date will continue to be available. For more information please
+          visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          </div>
+          {% block body %} {% endblock %}
+        </div>
+
+      {%- block relbar_bottom %}
+      {%- if theme_show_relbar_bottom|tobool %}
+        <div class="related bottom">
+          &nbsp;
+          {{- rellink_markup () }}
+        </div>
+      {%- endif %}
+      {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-ai-generativelanguage/docs/conf.py b/packages/google-ai-generativelanguage/docs/conf.py new file mode 100644 index 000000000000..c865aa35cd19 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-ai-generativelanguage documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-ai-generativelanguage" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-ai-generativelanguage", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-ai-generativelanguage-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-ai-generativelanguage.tex", + "google-ai-generativelanguage Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-ai-generativelanguage", + "google-ai-generativelanguage Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-ai-generativelanguage", + "google-ai-generativelanguage Documentation", + author, + "google-ai-generativelanguage", + "google-ai-generativelanguage Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/discuss_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/discuss_service.rst new file mode 100644 index 000000000000..be72af9f8e59 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/discuss_service.rst @@ -0,0 +1,6 @@ +DiscussService +-------------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta2.services.discuss_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/model_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/model_service.rst new file mode 100644 index 000000000000..7edf8f7f17c5 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/model_service.rst @@ -0,0 +1,10 @@ +ModelService +------------------------------ + +.. automodule:: google.ai.generativelanguage_v1beta2.services.model_service + :members: + :inherited-members: + +.. 
automodule:: google.ai.generativelanguage_v1beta2.services.model_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/services.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/services.rst new file mode 100644 index 000000000000..e9e01c10ac08 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/services.rst @@ -0,0 +1,8 @@ +Services for Google Ai Generativelanguage v1beta2 API +===================================================== +.. toctree:: + :maxdepth: 2 + + discuss_service + model_service + text_service diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/text_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/text_service.rst new file mode 100644 index 000000000000..f30551e0f177 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/text_service.rst @@ -0,0 +1,6 @@ +TextService +----------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta2.services.text_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/types.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/types.rst new file mode 100644 index 000000000000..81b702c1c9e1 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta2/types.rst @@ -0,0 +1,6 @@ +Types for Google Ai Generativelanguage v1beta2 API +================================================== + +.. automodule:: google.ai.generativelanguage_v1beta2.types + :members: + :show-inheritance: diff --git a/packages/google-ai-generativelanguage/docs/index.rst b/packages/google-ai-generativelanguage/docs/index.rst new file mode 100644 index 000000000000..8b2a9dd36744 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + generativelanguage_v1beta2/services + generativelanguage_v1beta2/types + + +Changelog +--------- + +For a list of all ``google-ai-generativelanguage`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-ai-generativelanguage/docs/multiprocessing.rst b/packages/google-ai-generativelanguage/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py new file mode 100644 index 000000000000..7a95ce1e9cd9 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
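The multiprocessing.rst note above is worth making concrete. Below is a minimal sketch of the recommended pattern, creating the client inside each worker so the gRPC channel is set up after the fork; it assumes Application Default Credentials are available, and the model name and prompt strings are placeholders, not values from this change:

```python
import multiprocessing

from google.ai import generativelanguage_v1beta2

_client = None  # one client per worker process


def _init_worker():
    # Runs inside the forked child, so the gRPC channel is created post-fork,
    # as the note above recommends.
    global _client
    _client = generativelanguage_v1beta2.DiscussServiceClient()


def _count_tokens(text):
    prompt = generativelanguage_v1beta2.MessagePrompt(
        messages=[generativelanguage_v1beta2.Message(content=text)]
    )
    response = _client.count_message_tokens(
        model="models/chat-bison-001",  # placeholder model name
        prompt=prompt,
    )
    return response.token_count


if __name__ == "__main__":
    with multiprocessing.Pool(processes=2, initializer=_init_worker) as pool:
        print(pool.map(_count_tokens, ["hello", "world"]))
```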
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.ai.generativelanguage import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.ai.generativelanguage_v1beta2.services.discuss_service.async_client import ( + DiscussServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta2.services.discuss_service.client import ( + DiscussServiceClient, +) +from google.ai.generativelanguage_v1beta2.services.model_service.async_client import ( + ModelServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta2.services.model_service.client import ( + ModelServiceClient, +) +from google.ai.generativelanguage_v1beta2.services.text_service.async_client import ( + TextServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta2.services.text_service.client import ( + TextServiceClient, +) +from google.ai.generativelanguage_v1beta2.types.citation import ( + CitationMetadata, + CitationSource, +) +from google.ai.generativelanguage_v1beta2.types.discuss_service import ( + CountMessageTokensRequest, + CountMessageTokensResponse, + Example, + GenerateMessageRequest, + GenerateMessageResponse, + Message, + MessagePrompt, +) +from google.ai.generativelanguage_v1beta2.types.model import Model +from google.ai.generativelanguage_v1beta2.types.model_service import ( + GetModelRequest, + ListModelsRequest, + ListModelsResponse, +) +from google.ai.generativelanguage_v1beta2.types.safety import ( + ContentFilter, + HarmCategory, + SafetyFeedback, + SafetyRating, + SafetySetting, +) +from google.ai.generativelanguage_v1beta2.types.text_service import ( + Embedding, + EmbedTextRequest, + EmbedTextResponse, + GenerateTextRequest, + GenerateTextResponse, + TextCompletion, + TextPrompt, +) + +__all__ = ( + "DiscussServiceClient", + "DiscussServiceAsyncClient", + "ModelServiceClient", + "ModelServiceAsyncClient", + "TextServiceClient", + "TextServiceAsyncClient", + "CitationMetadata", + "CitationSource", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + "Example", + "GenerateMessageRequest", + "GenerateMessageResponse", + "Message", + "MessagePrompt", + "Model", + "GetModelRequest", + "ListModelsRequest", + "ListModelsResponse", + "ContentFilter", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + "HarmCategory", + "Embedding", + "EmbedTextRequest", + "EmbedTextResponse", + "GenerateTextRequest", + "GenerateTextResponse", + "TextCompletion", + "TextPrompt", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py new file mode 100644 index 000000000000..f603aef57f86 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
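The flat `google.ai.generativelanguage` package above simply re-exports the versioned `generativelanguage_v1beta2` objects, so both import paths name the same classes. A quick check, assuming the package is installed:

```python
from google.ai import generativelanguage, generativelanguage_v1beta2

# The flat namespace re-exports the versioned objects; these are the
# same class objects, not copies.
assert (
    generativelanguage.DiscussServiceClient
    is generativelanguage_v1beta2.DiscussServiceClient
)
assert generativelanguage.Message is generativelanguage_v1beta2.Message
```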
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.2.1" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/py.typed b/packages/google-ai-generativelanguage/google/ai/generativelanguage/py.typed new file mode 100644 index 000000000000..38773eee6363 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ai-generativelanguage package uses inline types. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/__init__.py new file mode 100644 index 000000000000..e79ae25663bc --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/__init__.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.discuss_service import DiscussServiceAsyncClient, DiscussServiceClient +from .services.model_service import ModelServiceAsyncClient, ModelServiceClient +from .services.text_service import TextServiceAsyncClient, TextServiceClient +from .types.citation import CitationMetadata, CitationSource +from .types.discuss_service import ( + CountMessageTokensRequest, + CountMessageTokensResponse, + Example, + GenerateMessageRequest, + GenerateMessageResponse, + Message, + MessagePrompt, +) +from .types.model import Model +from .types.model_service import GetModelRequest, ListModelsRequest, ListModelsResponse +from .types.safety import ( + ContentFilter, + HarmCategory, + SafetyFeedback, + SafetyRating, + SafetySetting, +) +from .types.text_service import ( + Embedding, + EmbedTextRequest, + EmbedTextResponse, + GenerateTextRequest, + GenerateTextResponse, + TextCompletion, + TextPrompt, +) + +__all__ = ( + "DiscussServiceAsyncClient", + "ModelServiceAsyncClient", + "TextServiceAsyncClient", + "CitationMetadata", + "CitationSource", + "ContentFilter", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + "DiscussServiceClient", + "EmbedTextRequest", + "EmbedTextResponse", + "Embedding", + "Example", + "GenerateMessageRequest", + "GenerateMessageResponse", + "GenerateTextRequest", + "GenerateTextResponse", + "GetModelRequest", + "HarmCategory", + "ListModelsRequest", + "ListModelsResponse", + "Message", + "MessagePrompt", + "Model", + "ModelServiceClient", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + "TextCompletion", + "TextPrompt", + "TextServiceClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_metadata.json new file mode 100644 index 000000000000..e4a6a33d7d90 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_metadata.json @@ -0,0 +1,156 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ai.generativelanguage_v1beta2", + "protoPackage": "google.ai.generativelanguage.v1beta2", + "schema": "1.0", + "services": { + "DiscussService": { + "clients": { + "grpc": { + "libraryClient": "DiscussServiceClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DiscussServiceAsyncClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + }, + "rest": { + "libraryClient": "DiscussServiceClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + } + } + }, + "ModelService": { + "clients": { + "grpc": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ModelServiceAsyncClient", + "rpcs": { + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + } + } + }, + "rest": 
{ + "libraryClient": "ModelServiceClient", + "rpcs": { + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + } + } + } + } + }, + "TextService": { + "clients": { + "grpc": { + "libraryClient": "TextServiceClient", + "rpcs": { + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TextServiceAsyncClient", + "rpcs": { + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + }, + "rest": { + "libraryClient": "TextServiceClient", + "rpcs": { + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + } + } + } + } +} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py new file mode 100644 index 000000000000..f603aef57f86 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.2.1" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/py.typed b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/py.typed new file mode 100644 index 000000000000..38773eee6363 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ai-generativelanguage package uses inline types. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/__init__.py new file mode 100644 index 000000000000..6adf9b9aa4d3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DiscussServiceAsyncClient +from .client import DiscussServiceClient + +__all__ = ( + "DiscussServiceClient", + "DiscussServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py new file mode 100644 index 000000000000..36ac386630ef --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/async_client.py @@ -0,0 +1,543 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.ai.generativelanguage_v1beta2.types import discuss_service, safety + +from .client import DiscussServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .transports.grpc_asyncio import DiscussServiceGrpcAsyncIOTransport + + +class DiscussServiceAsyncClient: + """An API for using Generative Language Models (GLMs) in dialog + applications. 
+ Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + """ + + _client: DiscussServiceClient + + DEFAULT_ENDPOINT = DiscussServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DiscussServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(DiscussServiceClient.model_path) + parse_model_path = staticmethod(DiscussServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + DiscussServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DiscussServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DiscussServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DiscussServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DiscussServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DiscussServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DiscussServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DiscussServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DiscussServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DiscussServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceAsyncClient: The constructed client. + """ + return DiscussServiceClient.from_service_account_info.__func__(DiscussServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceAsyncClient: The constructed client. + """ + return DiscussServiceClient.from_service_account_file.__func__(DiscussServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint.
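The resolution order described above can be exercised directly through this classmethod. A minimal sketch, clearing the two environment variables first so the outcome is deterministic; `example.googleapis.com` is a placeholder endpoint:

```python
import os

from google.api_core.client_options import ClientOptions

from google.ai import generativelanguage_v1beta2

Client = generativelanguage_v1beta2.DiscussServiceAsyncClient

# Make the outcome deterministic for the sketch.
os.environ.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None)
os.environ.pop("GOOGLE_API_USE_MTLS_ENDPOINT", None)

# Rule (1): an explicit api_endpoint always wins.
endpoint, cert = Client.get_mtls_endpoint_and_cert_source(
    ClientOptions(api_endpoint="example.googleapis.com")
)
assert endpoint == "example.googleapis.com"

# Default path: no overrides and no client certificate -> regular endpoint.
endpoint, cert = Client.get_mtls_endpoint_and_cert_source(None)
assert endpoint == Client.DEFAULT_ENDPOINT and cert is None
```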
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DiscussServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DiscussServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DiscussServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DiscussServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the discuss service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DiscussServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason.
+ """ + self._client = DiscussServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_message( + self, + request: Optional[Union[discuss_service.GenerateMessageRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Generates a response from the model given an input + ``MessagePrompt``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + async def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta2.types.GenerateMessageRequest, dict]]): + The request object. Request to generate a message + response from the model. + model (:class:`str`): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1beta2.types.MessagePrompt`): + Required. The structured textual + input given to the model as a prompt. + Given a + prompt, the model will return what it + predicts is the next message in the + discussion. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (:class:`float`): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (:class:`int`): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If + unset, this will default to ``1``. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (:class:`float`): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. 
+ + Nucleus sampling considers the smallest set of tokens + whose probability sum is at least ``top_p``. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (:class:`int`): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.GenerateMessageResponse: + The response from the model. + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [model, prompt, temperature, candidate_count, top_p, top_k] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = discuss_service.GenerateMessageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_message, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def count_message_tokens( + self, + request: Optional[ + Union[discuss_service.CountMessageTokensRequest, dict] + ] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Runs a model's tokenizer on a string and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + async def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_message_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta2.types.CountMessageTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1beta2.types.MessagePrompt`): + Required. The prompt, whose token + count is to be returned. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.CountMessageTokensResponse: + A response from CountMessageTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = discuss_service.CountMessageTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.count_message_tokens, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DiscussServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py new file mode 100644 index 000000000000..49fec683c77d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py @@ -0,0 +1,772 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.ai.generativelanguage_v1beta2.types import discuss_service, safety + +from .transports.base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .transports.grpc import DiscussServiceGrpcTransport +from .transports.grpc_asyncio import DiscussServiceGrpcAsyncIOTransport +from .transports.rest import DiscussServiceRestTransport + + +class DiscussServiceClientMeta(type): + """Metaclass for the DiscussService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DiscussServiceTransport]] + _transport_registry["grpc"] = DiscussServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DiscussServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DiscussServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DiscussServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DiscussServiceClient(metaclass=DiscussServiceClientMeta): + """An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DiscussServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DiscussServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DiscussServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the discuss service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DiscussServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DiscussServiceTransport): + # transport is a DiscussServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def generate_message( + self, + request: Optional[Union[discuss_service.GenerateMessageRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Generates a response from the model given an input + ``MessagePrompt``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta2.types.GenerateMessageRequest, dict]): + The request object. Request to generate a message + response from the model. + model (str): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1beta2.types.MessagePrompt): + Required. The structured textual + input given to the model as a prompt. + Given a + prompt, the model will return what it + predicts is the next message in the + discussion. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (int): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If + unset, this will default to ``1``. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (float): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens + whose probability sum is at least ``top_p``. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.GenerateMessageResponse: + The response from the model. + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [model, prompt, temperature, candidate_count, top_p, top_k] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a discuss_service.GenerateMessageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, discuss_service.GenerateMessageRequest): + request = discuss_service.GenerateMessageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_message] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def count_message_tokens( + self, + request: Optional[ + Union[discuss_service.CountMessageTokensRequest, dict] + ] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Runs a model's tokenizer on a string and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_message_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta2.types.CountMessageTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. 
This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1beta2.types.MessagePrompt): + Required. The prompt, whose token + count is to be returned. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.CountMessageTokensResponse: + A response from CountMessageTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a discuss_service.CountMessageTokensRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, discuss_service.CountMessageTokensRequest): + request = discuss_service.CountMessageTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_message_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DiscussServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DiscussServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/__init__.py new file mode 100644 index 000000000000..39676b4199c6 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DiscussServiceTransport +from .grpc import DiscussServiceGrpcTransport +from .grpc_asyncio import DiscussServiceGrpcAsyncIOTransport +from .rest import DiscussServiceRestInterceptor, DiscussServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DiscussServiceTransport]] +_transport_registry["grpc"] = DiscussServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DiscussServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DiscussServiceRestTransport + +__all__ = ( + "DiscussServiceTransport", + "DiscussServiceGrpcTransport", + "DiscussServiceGrpcAsyncIOTransport", + "DiscussServiceRestTransport", + "DiscussServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/base.py new file mode 100644 index 000000000000..a36441a8d713 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/base.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
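The ``transports/__init__.py`` above registers each transport class under a name, and the client resolves ``transport="grpc"``, ``"grpc_asyncio"``, or ``"rest"`` against that registry. A hedged sketch combining transport selection with the context-manager support shown earlier (the model name is illustrative):

.. code-block:: python

    from google.ai import generativelanguage_v1beta2

    # "rest" resolves to DiscussServiceRestTransport via the registry above.
    with generativelanguage_v1beta2.DiscussServiceClient(transport="rest") as client:
        prompt = generativelanguage_v1beta2.MessagePrompt(
            messages=[generativelanguage_v1beta2.Message(content="Hello")]
        )
        response = client.count_message_tokens(
            model="models/chat-bison-001",  # illustrative model name
            prompt=prompt,
        )
        print(response.token_count)
    # Exiting the with-block closed the transport; do not reuse the client.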
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version +from google.ai.generativelanguage_v1beta2.types import discuss_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DiscussServiceTransport(abc.ABC): + """Abstract transport class for DiscussService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
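To summarize the resolution order just implemented: explicit ``credentials`` win, then ``credentials_file``, then Application Default Credentials, and supplying the first two together raises ``DuplicateCredentialArgs``. A sketch of the two explicit options (the key-file path is hypothetical):

.. code-block:: python

    from google.oauth2 import service_account

    from google.ai import generativelanguage_v1beta2
    from google.ai.generativelanguage_v1beta2.services.discuss_service.transports import (
        DiscussServiceGrpcTransport,
    )

    # Option 1: load credentials yourself and hand them to the client.
    creds = service_account.Credentials.from_service_account_file("key.json")
    client = generativelanguage_v1beta2.DiscussServiceClient(credentials=creds)

    # Option 2: let the transport load them from a file. Passing both
    # ``credentials`` and ``credentials_file`` raises DuplicateCredentialArgs.
    transport = DiscussServiceGrpcTransport(credentials_file="key.json")
    client = generativelanguage_v1beta2.DiscussServiceClient(transport=transport)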
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_message: gapic_v1.method.wrap_method( + self.generate_message, + default_timeout=None, + client_info=client_info, + ), + self.count_message_tokens: gapic_v1.method.wrap_method( + self.count_message_tokens, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + Union[ + discuss_service.GenerateMessageResponse, + Awaitable[discuss_service.GenerateMessageResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + Union[ + discuss_service.CountMessageTokensResponse, + Awaitable[discuss_service.CountMessageTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DiscussServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/grpc.py new file mode 100644 index 000000000000..2dfa31b8a534 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/grpc.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta2.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO, DiscussServiceTransport + + +class DiscussServiceGrpcTransport(DiscussServiceTransport): + """gRPC backend transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. 
+ Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + discuss_service.GenerateMessageResponse, + ]: + r"""Return a callable for the generate message method over gRPC. + + Generates a response from the model given an input + ``MessagePrompt``. + + Returns: + Callable[[~.GenerateMessageRequest], + ~.GenerateMessageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_message" not in self._stubs: + self._stubs["generate_message"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.DiscussService/GenerateMessage", + request_serializer=discuss_service.GenerateMessageRequest.serialize, + response_deserializer=discuss_service.GenerateMessageResponse.deserialize, + ) + return self._stubs["generate_message"] + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + discuss_service.CountMessageTokensResponse, + ]: + r"""Return a callable for the count message tokens method over gRPC. + + Runs a model's tokenizer on a string and returns the + token count. + + Returns: + Callable[[~.CountMessageTokensRequest], + ~.CountMessageTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_message_tokens" not in self._stubs: + self._stubs["count_message_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.DiscussService/CountMessageTokens", + request_serializer=discuss_service.CountMessageTokensRequest.serialize, + response_deserializer=discuss_service.CountMessageTokensResponse.deserialize, + ) + return self._stubs["count_message_tokens"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DiscussServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..389142c168f3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/grpc_asyncio.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta2.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .grpc import DiscussServiceGrpcTransport + + +class DiscussServiceGrpcAsyncIOTransport(DiscussServiceTransport): + """gRPC AsyncIO backend transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + Awaitable[discuss_service.GenerateMessageResponse], + ]: + r"""Return a callable for the generate message method over gRPC. + + Generates a response from the model given an input + ``MessagePrompt``. + + Returns: + Callable[[~.GenerateMessageRequest], + Awaitable[~.GenerateMessageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
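The mTLS branches above take a callback returning the client certificate and private key as PEM bytes; it is only consulted when no explicit channel or SSL channel credentials are given. A hedged sketch (the file paths are hypothetical):

.. code-block:: python

    from google.ai.generativelanguage_v1beta2.services.discuss_service.transports import (
        DiscussServiceGrpcAsyncIOTransport,
    )

    def client_cert_source():
        # Hypothetical loader: returns (cert_bytes, key_bytes), both PEM-encoded.
        with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
            return cert.read(), key.read()

    transport = DiscussServiceGrpcAsyncIOTransport(
        client_cert_source_for_mtls=client_cert_source,
    )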
+ if "generate_message" not in self._stubs: + self._stubs["generate_message"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.DiscussService/GenerateMessage", + request_serializer=discuss_service.GenerateMessageRequest.serialize, + response_deserializer=discuss_service.GenerateMessageResponse.deserialize, + ) + return self._stubs["generate_message"] + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + Awaitable[discuss_service.CountMessageTokensResponse], + ]: + r"""Return a callable for the count message tokens method over gRPC. + + Runs a model's tokenizer on a string and returns the + token count. + + Returns: + Callable[[~.CountMessageTokensRequest], + Awaitable[~.CountMessageTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_message_tokens" not in self._stubs: + self._stubs["count_message_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.DiscussService/CountMessageTokens", + request_serializer=discuss_service.CountMessageTokensRequest.serialize, + response_deserializer=discuss_service.CountMessageTokensResponse.deserialize, + ) + return self._stubs["count_message_tokens"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("DiscussServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/rest.py new file mode 100644 index 000000000000..dd64984281eb --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/rest.py @@ -0,0 +1,471 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
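With the AsyncIO transport complete, a brief usage sketch: the async client wires in ``DiscussServiceGrpcAsyncIOTransport`` by default, and every RPC returns an awaitable (the model name is illustrative):

.. code-block:: python

    import asyncio

    from google.ai import generativelanguage_v1beta2

    async def main():
        # Uses DiscussServiceGrpcAsyncIOTransport by default.
        client = generativelanguage_v1beta2.DiscussServiceAsyncClient()

        prompt = generativelanguage_v1beta2.MessagePrompt(
            messages=[generativelanguage_v1beta2.Message(content="Hello there!")]
        )
        response = await client.generate_message(
            model="models/chat-bison-001",  # illustrative model name
            prompt=prompt,
        )
        print(response)

    asyncio.run(main())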
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.ai.generativelanguage_v1beta2.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DiscussServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DiscussServiceRestInterceptor: + """Interceptor for DiscussService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DiscussServiceRestTransport. + + .. code-block:: python + class MyCustomDiscussServiceInterceptor(DiscussServiceRestInterceptor): + def pre_count_message_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_message_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_message(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_message(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DiscussServiceRestTransport(interceptor=MyCustomDiscussServiceInterceptor()) + client = DiscussServiceClient(transport=transport) + + + """ + + def pre_count_message_tokens( + self, + request: discuss_service.CountMessageTokensRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[discuss_service.CountMessageTokensRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for count_message_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. + """ + return request, metadata + + def post_count_message_tokens( + self, response: discuss_service.CountMessageTokensResponse + ) -> discuss_service.CountMessageTokensResponse: + """Post-rpc interceptor for count_message_tokens + + Override in a subclass to manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. 
+ """ + return response + + def pre_generate_message( + self, + request: discuss_service.GenerateMessageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[discuss_service.GenerateMessageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_message + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. + """ + return request, metadata + + def post_generate_message( + self, response: discuss_service.GenerateMessageResponse + ) -> discuss_service.GenerateMessageResponse: + """Post-rpc interceptor for generate_message + + Override in a subclass to manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DiscussServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DiscussServiceRestInterceptor + + +class DiscussServiceRestTransport(DiscussServiceTransport): + """REST backend transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DiscussServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DiscussServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CountMessageTokens(DiscussServiceRestStub):
+        def __hash__(self):
+            return hash("CountMessageTokens")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: discuss_service.CountMessageTokensRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> discuss_service.CountMessageTokensResponse:
+            r"""Call the count message tokens method over HTTP.
+
+            Args:
+                request (~.discuss_service.CountMessageTokensRequest):
+                    The request object. Counts the number of tokens in the ``prompt`` sent to a
+                    model.
+
+                    Models may tokenize text differently, so each model may
+                    return a different ``token_count``.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.discuss_service.CountMessageTokensResponse:
+                    A response from ``CountMessageTokens``.
+
+                    It returns the model's ``token_count`` for the
+                    ``prompt``.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{model=models/*}:countMessageTokens", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_count_message_tokens( + request, metadata + ) + pb_request = discuss_service.CountMessageTokensRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discuss_service.CountMessageTokensResponse() + pb_resp = discuss_service.CountMessageTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_count_message_tokens(resp) + return resp + + class _GenerateMessage(DiscussServiceRestStub): + def __hash__(self): + return hash("GenerateMessage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: discuss_service.GenerateMessageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Call the generate message method over HTTP. + + Args: + request (~.discuss_service.GenerateMessageRequest): + The request object. Request to generate a message + response from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.discuss_service.GenerateMessageResponse: + The response from the model. + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{model=models/*}:generateMessage", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_message( + request, metadata + ) + pb_request = discuss_service.GenerateMessageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discuss_service.GenerateMessageResponse() + pb_resp = discuss_service.GenerateMessageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_message(resp) + return resp + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + discuss_service.CountMessageTokensResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountMessageTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + discuss_service.GenerateMessageResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateMessage(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DiscussServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/__init__.py new file mode 100644 index 000000000000..fa113724a380 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ModelServiceAsyncClient +from .client import ModelServiceClient + +__all__ = ( + "ModelServiceClient", + "ModelServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py new file mode 100644 index 000000000000..08d1920feb55 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/async_client.py @@ -0,0 +1,461 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.ai.generativelanguage_v1beta2.services.model_service import pagers +from google.ai.generativelanguage_v1beta2.types import model, model_service + +from .client import ModelServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport + + +class ModelServiceAsyncClient: + """Provides methods for getting metadata information about + Generative Models. 
+ """ + + _client: ModelServiceClient + + DEFAULT_ENDPOINT = ModelServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ModelServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(ModelServiceClient.model_path) + parse_model_path = staticmethod(ModelServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + ModelServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ModelServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ModelServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(ModelServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(ModelServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + ModelServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ModelServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ModelServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ModelServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ModelServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. + """ + return ModelServiceClient.from_service_account_info.__func__(ModelServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. + """ + return ModelServiceClient.from_service_account_file.__func__(ModelServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ModelServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ModelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ModelServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ModelServiceClient).get_transport_class, type(ModelServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ModelServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ModelServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ModelServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Gets information about a specific Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + async def sample_get_model(): + # Create a client + client = generativelanguage_v1beta2.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta2.types.GetModelRequest, dict]]): + The request object. Request for getting information about + a specific Model. + name (:class:`str`): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.GetModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_model, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsAsyncPager: + r"""Lists models available through the API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + async def sample_list_models(): + # Create a client + client = generativelanguage_v1beta2.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta2.types.ListModelsRequest, dict]]): + The request object. Request for listing all Models. + page_size (:class:`int`): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at + most 50 models will be returned per page. This method + returns at most 1000 models per page, even if you pass a + larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (:class:`str`): + A page token, received from a previous ``ListModels`` + call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.services.model_service.pagers.ListModelsAsyncPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.ListModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_models, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListModelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
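+        # A hand-written usage sketch (not generated code) for the pager this
+        # method returns; `client` is assumed to be a ModelServiceAsyncClient
+        # with working credentials:
+        #
+        #     pager = await client.list_models(page_size=10)
+        #     async for model in pager:
+        #         print(model.name)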
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py new file mode 100644 index 000000000000..3b7432051226 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py @@ -0,0 +1,692 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.ai.generativelanguage_v1beta2.services.model_service import pagers +from google.ai.generativelanguage_v1beta2.types import model, model_service + +from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .transports.grpc import ModelServiceGrpcTransport +from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport +from .transports.rest import ModelServiceRestTransport + + +class ModelServiceClientMeta(type): + """Metaclass for the ModelService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ModelServiceTransport]] + _transport_registry["grpc"] = ModelServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ModelServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ModelServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ModelServiceClient(metaclass=ModelServiceClientMeta):
+    """Provides methods for getting metadata information about
+    Generative Models.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "generativelanguage.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ModelServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ModelServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> ModelServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ModelServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def model_path(
+        model: str,
+    ) -> str:
+        """Returns a fully-qualified model string."""
+        return "models/{model}".format(
+            model=model,
+        )
+
+    @staticmethod
+    def parse_model_path(path: str) -> Dict[str, str]:
+        """Parses a model path into its component segments."""
+        m = re.match(r"^models/(?P<model>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, ModelServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the model service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ModelServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ModelServiceTransport): + # transport is a ModelServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Gets information about a specific Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + def sample_get_model(): + # Create a client + client = generativelanguage_v1beta2.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta2.types.GetModelRequest, dict]): + The request object. Request for getting information about + a specific Model. + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.GetModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.GetModelRequest): + request = model_service.GetModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsPager: + r"""Lists models available through the API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + def sample_list_models(): + # Create a client + client = generativelanguage_v1beta2.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta2.types.ListModelsRequest, dict]): + The request object. Request for listing all Models. + page_size (int): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at + most 50 models will be returned per page. This method + returns at most 1000 models per page, even if you pass a + larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (str): + A page token, received from a previous ``ListModels`` + call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.services.model_service.pagers.ListModelsPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.ListModelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.ListModelsRequest): + request = model_service.ListModelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_models] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
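+        # Note: subsequent pages are fetched by the pager re-calling this
+        # wrapped RPC with only the updated request and metadata, so an
+        # explicit `retry`/`timeout` passed to this call applies to the first
+        # page only; later page fetches use the transport defaults.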
+ response = pagers.ListModelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ModelServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/pagers.py new file mode 100644 index 000000000000..1a944aa87a52 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.ai.generativelanguage_v1beta2.types import model, model_service + + +class ListModelsPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta2.types.ListModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta2.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., model_service.ListModelsResponse], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta2.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta2.types.ListModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
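+
+        This pager is normally constructed for you by ``list_models``. A
+        minimal, hand-written illustration of consuming it (not generated
+        code; assumes an authenticated ``client``)::
+
+            for model in client.list_models():
+                print(model.name)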
+ """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[model.Model]: + for page in self.pages: + yield from page.models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListModelsAsyncPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta2.types.ListModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta2.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[model_service.ListModelsResponse]], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta2.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta2.types.ListModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[model.Model]: + async def async_generator(): + async for page in self.pages: + for response in page.models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/__init__.py new file mode 100644 index 000000000000..5041c1c05a07 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ModelServiceTransport +from .grpc import ModelServiceGrpcTransport +from .grpc_asyncio import ModelServiceGrpcAsyncIOTransport +from .rest import ModelServiceRestInterceptor, ModelServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ModelServiceTransport]] +_transport_registry["grpc"] = ModelServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ModelServiceRestTransport + +__all__ = ( + "ModelServiceTransport", + "ModelServiceGrpcTransport", + "ModelServiceGrpcAsyncIOTransport", + "ModelServiceRestTransport", + "ModelServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/base.py new file mode 100644 index 000000000000..f584def422b2 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/base.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version +from google.ai.generativelanguage_v1beta2.types import model, model_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ModelServiceTransport(abc.ABC): + """Abstract transport class for ModelService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
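+            # (In other words, the GDCH audience below is only applied to
+            # credentials obtained via google.auth.default() in this branch,
+            # never to caller-supplied credentials or a credentials file.)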
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_model: gapic_v1.method.wrap_method( + self.get_model, + default_timeout=None, + client_info=client_info, + ), + self.list_models: gapic_v1.method.wrap_method( + self.list_models, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_model( + self, + ) -> Callable[ + [model_service.GetModelRequest], Union[model.Model, Awaitable[model.Model]] + ]: + raise NotImplementedError() + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], + Union[ + model_service.ListModelsResponse, + Awaitable[model_service.ListModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ModelServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/grpc.py new file mode 100644 index 000000000000..ae01354b4a53 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/grpc.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta2.types import model, model_service + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport + + +class ModelServiceGrpcTransport(ModelServiceTransport): + """gRPC backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
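+
+        A minimal, hand-written illustration of supplying a pre-built channel
+        (not generated code; credentials handling is elided)::
+
+            channel = ModelServiceGrpcTransport.create_channel()
+            transport = ModelServiceGrpcTransport(channel=channel)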
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific Model. + + Returns: + Callable[[~.GetModelRequest], + ~.Model]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + r"""Return a callable for the list models method over gRPC. + + Lists models available through the API. + + Returns: + Callable[[~.ListModelsRequest], + ~.ListModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ModelServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..06ac6952df5b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/grpc_asyncio.py @@ -0,0 +1,293 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta2.types import model, model_service + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .grpc import ModelServiceGrpcTransport + + +class ModelServiceGrpcAsyncIOTransport(ModelServiceTransport): + """gRPC AsyncIO backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
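+ # For example, a caller opting into the deprecated mTLS arguments + # might construct the transport as (hypothetical values, for + # illustration only): + #     ModelServiceGrpcAsyncIOTransport( + #         api_mtls_endpoint="generativelanguage.mtls.googleapis.com", + #         client_cert_source=lambda: (cert_pem_bytes, key_pem_bytes), + #     )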
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_model( + self, + ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific Model. + + Returns: + Callable[[~.GetModelRequest], + Awaitable[~.Model]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], Awaitable[model_service.ListModelsResponse] + ]: + r"""Return a callable for the list models method over gRPC. + + Lists models available through the API. + + Returns: + Callable[[~.ListModelsRequest], + Awaitable[~.ListModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
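+ # For example (hypothetical direct usage; the generated client + # normally drives these callables for you): + #     response = await transport.list_models( + #         model_service.ListModelsRequest(page_size=10) + #     )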
+ if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ModelServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/rest.py new file mode 100644 index 000000000000..25b9a7443752 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/rest.py @@ -0,0 +1,418 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.ai.generativelanguage_v1beta2.types import model, model_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ModelServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ModelServiceRestInterceptor: + """Interceptor for ModelService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ModelServiceRestTransport. + + .. 
code-block:: python + class MyCustomModelServiceInterceptor(ModelServiceRestInterceptor): + def pre_get_model(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_model(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_models(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_models(self, response): + logging.info(f"Received response: {response}") + return response + + transport = ModelServiceRestTransport(interceptor=MyCustomModelServiceInterceptor()) + client = ModelServiceClient(transport=transport) + + + """ + + def pre_get_model( + self, + request: model_service.GetModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.GetModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_get_model(self, response: model.Model) -> model.Model: + """Post-rpc interceptor for get_model + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_list_models( + self, + request: model_service.ListModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.ListModelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_list_models( + self, response: model_service.ListModelsResponse + ) -> model_service.ListModelsResponse: + """Post-rpc interceptor for list_models + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ModelServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ModelServiceRestInterceptor + + +class ModelServiceRestTransport(ModelServiceTransport): + """REST backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ModelServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests.
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ModelServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetModel(ModelServiceRestStub): + def __hash__(self): + return hash("GetModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: model_service.GetModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Call the get model method over HTTP. + + Args: + request (~.model_service.GetModelRequest): + The request object. Request for getting information about + a specific Model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.model.Model: + Information about a Generative + Language Model.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/{name=models/*}", + }, + ] + request, metadata = self._interceptor.pre_get_model(request, metadata) + pb_request = model_service.GetModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model.Model() + pb_resp = model.Model.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_model(resp) + return resp + + class _ListModels(ModelServiceRestStub): + def __hash__(self): + return hash("ListModels") + + def __call__( + self, + request: model_service.ListModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model_service.ListModelsResponse: + r"""Call the list models method over HTTP. + + Args: + request (~.model_service.ListModelsRequest): + The request object. Request for listing all Models. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.model_service.ListModelsResponse: + Response from ``ListModel`` containing a paginated list + of Models. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/models", + }, + ] + request, metadata = self._interceptor.pre_list_models(request, metadata) + pb_request = model_service.ListModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model_service.ListModelsResponse() + pb_resp = model_service.ListModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_models(resp) + return resp + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ModelServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/__init__.py new file mode 100644 index 000000000000..d59a75e5d69d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TextServiceAsyncClient +from .client import TextServiceClient + +__all__ = ( + "TextServiceClient", + "TextServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py new file mode 100644 index 000000000000..e41dcaebb258 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/async_client.py @@ -0,0 +1,552 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.ai.generativelanguage_v1beta2.types import safety, text_service + +from .client import TextServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .transports.grpc_asyncio import TextServiceGrpcAsyncIOTransport + + +class TextServiceAsyncClient: + """API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLMs), these generate text + given an input prompt from the user. + """ + + _client: TextServiceClient + + DEFAULT_ENDPOINT = TextServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TextServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(TextServiceClient.model_path) + parse_model_path = staticmethod(TextServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + TextServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TextServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TextServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(TextServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(TextServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + TextServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(TextServiceClient.common_project_path) + parse_common_project_path = staticmethod( + TextServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(TextServiceClient.common_location_path) + parse_common_location_path = staticmethod( + TextServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceAsyncClient: The constructed client. + """ + return TextServiceClient.from_service_account_info.__func__(TextServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceAsyncClient: The constructed client.
+ """ + return TextServiceClient.from_service_account_file.__func__(TextServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TextServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TextServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TextServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(TextServiceClient).get_transport_class, type(TextServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TextServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TextServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TextServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_text( + self, + request: Optional[Union[text_service.GenerateTextRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + max_output_tokens: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.GenerateTextResponse: + r"""Generates a response from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + async def sample_generate_text(): + # Create a client + client = generativelanguage_v1beta2.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta2.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta2.types.GenerateTextRequest, dict]]): + The request object. Request to generate a text completion + response from the model. + model (:class:`str`): + Required. The model name to use with + the format name=models/{model}. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1beta2.types.TextPrompt`): + Required. The free-form input text + given to the model as a prompt. + Given a prompt, the model will generate + a TextCompletion response it predicts as + the completion of the input text. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (:class:`float`): + Controls the randomness of the output. Note: The default + value varies by model, see the ``Model.temperature`` + attribute of the ``Model`` returned by the ``getModel`` + function. + + Values can range from [0.0,1.0], inclusive. A value + closer to 1.0 will produce responses that are more + varied and creative, while a value closer to 0.0 will + typically result in more straightforward responses from + the model.
+ + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (:class:`int`): + Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, + this will default to 1. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_output_tokens (:class:`int`): + The maximum number of tokens to + include in a candidate. + If unset, this will default to 64. + + This corresponds to the ``max_output_tokens`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (:class:`float`): + The maximum cumulative probability of tokens to consider + when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities + so that only the most likely tokens are considered. + Top-k sampling directly limits the maximum number of + tokens to consider, while nucleus sampling limits the number + of tokens based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned by the + ``getModel`` function. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (:class:`int`): + The maximum number of tokens to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. Defaults to 40. + + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned by the + ``getModel`` function. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.GenerateTextResponse: + The response from the model, + including candidate completions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + model, + prompt, + temperature, + candidate_count, + max_output_tokens, + top_p, + top_k, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = text_service.GenerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if max_output_tokens is not None: + request.max_output_tokens = max_output_tokens + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
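+ # Retry and timeout can also be overridden per call, e.g. + # (hypothetical values): + #     await client.generate_text(request=request, timeout=30.0)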
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def embed_text( + self, + request: Optional[Union[text_service.EmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + text: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.EmbedTextResponse: + r"""Generates an embedding from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta2 + + async def sample_embed_text(): + # Create a client + client = generativelanguage_v1beta2.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.EmbedTextRequest( + model="model_value", + text="text_value", + ) + + # Make the request + response = await client.embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta2.types.EmbedTextRequest, dict]]): + The request object. Request to get a text embedding from + the model. + model (:class:`str`): + Required. The model name to use with + the format model=models/{model}. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + text (:class:`str`): + Required. The free-form input text + that the model will turn into an + embedding. + + This corresponds to the ``text`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta2.types.EmbedTextResponse: + The response to an EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, text]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = text_service.EmbedTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if text is not None: + request.text = text + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
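+ # The routing header appended below is sent to the server as an + # ``x-goog-request-params`` metadata entry (e.g. ``model=models%2F...``) + # so requests can be routed by model, mirroring generate_text above.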
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.embed_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py new file mode 100644 index 000000000000..fc835d85f842 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py @@ -0,0 +1,783 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.ai.generativelanguage_v1beta2.types import safety, text_service + +from .transports.base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .transports.grpc import TextServiceGrpcTransport +from .transports.grpc_asyncio import TextServiceGrpcAsyncIOTransport +from .transports.rest import TextServiceRestTransport + + +class TextServiceClientMeta(type): + """Metaclass for the TextService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TextServiceTransport]] + _transport_registry["grpc"] = TextServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TextServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TextServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TextServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TextServiceClient(metaclass=TextServiceClientMeta): + """API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TextServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TextServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TextServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TextServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        client_options = cast(client_options_lib.ClientOptions, client_options)
+
+        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
+            client_options
+        )
+
+        api_key_value = getattr(client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError(
+                "client_options.api_key and credentials are mutually exclusive"
+            )
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        if isinstance(transport, TextServiceTransport):
+            # transport is a TextServiceTransport instance.
+            if credentials or client_options.credentials_file or api_key_value:
+                raise ValueError(
+                    "When providing a transport instance, "
+                    "provide its credentials directly."
+                )
+            if client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = transport
+        else:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(
+                google.auth._default, "get_api_key_credentials"
+            ):
+                credentials = google.auth._default.get_api_key_credentials(
+                    api_key_value
+                )
+
+            Transport = type(self).get_transport_class(transport)
+            self._transport = Transport(
+                credentials=credentials,
+                credentials_file=client_options.credentials_file,
+                host=api_endpoint,
+                scopes=client_options.scopes,
+                client_cert_source_for_mtls=client_cert_source_func,
+                quota_project_id=client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=client_options.api_audience,
+            )
+
+    def generate_text(
+        self,
+        request: Optional[Union[text_service.GenerateTextRequest, dict]] = None,
+        *,
+        model: Optional[str] = None,
+        prompt: Optional[text_service.TextPrompt] = None,
+        temperature: Optional[float] = None,
+        candidate_count: Optional[int] = None,
+        max_output_tokens: Optional[int] = None,
+        top_p: Optional[float] = None,
+        top_k: Optional[int] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> text_service.GenerateTextResponse:
+        r"""Generates a response from the model given an input
+        message.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.ai import generativelanguage_v1beta2
+
+            def sample_generate_text():
+                # Create a client
+                client = generativelanguage_v1beta2.TextServiceClient()
+
+                # Initialize request argument(s)
+                prompt = generativelanguage_v1beta2.TextPrompt()
+                prompt.text = "text_value"
+
+                request = generativelanguage_v1beta2.GenerateTextRequest(
+                    model="model_value",
+                    prompt=prompt,
+                )
+
+                # Make the request
+                response = client.generate_text(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.ai.generativelanguage_v1beta2.types.GenerateTextRequest, dict]):
+                The request object. Request to generate a text completion
+                response from the model.
+            model (str):
+                Required. The model name to use with
+                the format name=models/{model}.
+
+                This corresponds to the ``model`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            prompt (google.ai.generativelanguage_v1beta2.types.TextPrompt):
+                Required. The free-form input text
+                given to the model as a prompt.
+                Given a prompt, the model will generate
+                a TextCompletion response it predicts as
+                the completion of the input text.
+
+                This corresponds to the ``prompt`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            temperature (float):
+                Controls the randomness of the output. Note: The default
+                value varies by model, see the ``Model.temperature``
+                attribute of the ``Model`` returned by the ``getModel``
+                function.
+
+                Values can range from [0.0,1.0], inclusive. A value
+                closer to 1.0 will produce responses that are more
+                varied and creative, while a value closer to 0.0 will
+                typically result in more straightforward responses from
+                the model.
+
+                This corresponds to the ``temperature`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            candidate_count (int):
+                Number of generated responses to return.
+
+                This value must be between [1, 8], inclusive. If unset,
+                this will default to 1.
+
+                This corresponds to the ``candidate_count`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            max_output_tokens (int):
+                The maximum number of tokens to
+                include in a candidate.
+                If unset, this will default to 64.
+
+                This corresponds to the ``max_output_tokens`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            top_p (float):
+                The maximum cumulative probability of tokens to consider
+                when sampling.
+
+                The model uses combined Top-k and nucleus sampling.
+
+                Tokens are sorted based on their assigned probabilities
+                so that only the most likely tokens are considered.
+                Top-k sampling directly limits the maximum number of
+                tokens to consider, while Nucleus sampling limits the
+                number of tokens based on the cumulative probability.
+
+                Note: The default value varies by model, see the
+                ``Model.top_p`` attribute of the ``Model`` returned by
+                the ``getModel`` function.
+
+                This corresponds to the ``top_p`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            top_k (int):
+                The maximum number of tokens to consider when sampling.
+
+                The model uses combined Top-k and nucleus sampling.
+
+                Top-k sampling considers the set of ``top_k`` most
+                probable tokens. Defaults to 40.
+
+                Note: The default value varies by model, see the
+                ``Model.top_k`` attribute of the ``Model`` returned by
+                the ``getModel`` function.
+
+                This corresponds to the ``top_k`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.ai.generativelanguage_v1beta2.types.GenerateTextResponse:
+                The response from the model,
+                including candidate completions.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any(
+            [
+                model,
+                prompt,
+                temperature,
+                candidate_count,
+                max_output_tokens,
+                top_p,
+                top_k,
+            ]
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a text_service.GenerateTextRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, text_service.GenerateTextRequest):
+            request = text_service.GenerateTextRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if model is not None:
+                request.model = model
+            if prompt is not None:
+                request.prompt = prompt
+            if temperature is not None:
+                request.temperature = temperature
+            if candidate_count is not None:
+                request.candidate_count = candidate_count
+            if max_output_tokens is not None:
+                request.max_output_tokens = max_output_tokens
+            if top_p is not None:
+                request.top_p = top_p
+            if top_k is not None:
+                request.top_k = top_k
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.generate_text]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def embed_text(
+        self,
+        request: Optional[Union[text_service.EmbedTextRequest, dict]] = None,
+        *,
+        model: Optional[str] = None,
+        text: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> text_service.EmbedTextResponse:
+        r"""Generates an embedding from the model given an input
+        message.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.ai import generativelanguage_v1beta2
+
+            def sample_embed_text():
+                # Create a client
+                client = generativelanguage_v1beta2.TextServiceClient()
+
+                # Initialize request argument(s)
+                request = generativelanguage_v1beta2.EmbedTextRequest(
+                    model="model_value",
+                    text="text_value",
+                )
+
+                # Make the request
+                response = client.embed_text(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.ai.generativelanguage_v1beta2.types.EmbedTextRequest, dict]):
+                The request object. Request to get a text embedding from
+                the model.
+            model (str):
+                Required. The model name to use with
+                the format model=models/{model}.
+
+                This corresponds to the ``model`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            text (str):
+                Required. The free-form input text
+                that the model will turn into an
+                embedding.
+
+                This corresponds to the ``text`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.ai.generativelanguage_v1beta2.types.EmbedTextResponse:
+                The response to an EmbedTextRequest.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([model, text])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a text_service.EmbedTextRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, text_service.EmbedTextRequest):
+            request = text_service.EmbedTextRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if model is not None:
+                request.model = model
+            if text is not None:
+                request.text = text
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.embed_text]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "TextServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
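+
+            A minimal illustrative sketch (``request`` here is a placeholder
+            built elsewhere, not part of the generated code)::
+
+                with TextServiceClient() as client:
+                    response = client.generate_text(request=request)
+                # The transport is closed on exit; do not reuse the client.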
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/__init__.py new file mode 100644 index 000000000000..ca9d72f7c372 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TextServiceTransport +from .grpc import TextServiceGrpcTransport +from .grpc_asyncio import TextServiceGrpcAsyncIOTransport +from .rest import TextServiceRestInterceptor, TextServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TextServiceTransport]] +_transport_registry["grpc"] = TextServiceGrpcTransport +_transport_registry["grpc_asyncio"] = TextServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TextServiceRestTransport + +__all__ = ( + "TextServiceTransport", + "TextServiceGrpcTransport", + "TextServiceGrpcAsyncIOTransport", + "TextServiceRestTransport", + "TextServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/base.py new file mode 100644 index 000000000000..38e3ac4142fd --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/base.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.ai.generativelanguage_v1beta2 import gapic_version as package_version
+from google.ai.generativelanguage_v1beta2.types import text_service
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+class TextServiceTransport(abc.ABC):
+    """Abstract transport class for TextService."""
+
+    AUTH_SCOPES = ()
+
+    DEFAULT_HOST: str = "generativelanguage.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply an audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
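+        # A self-signed JWT lets a service account sign its own bearer token
+        # locally instead of exchanging credentials with the OAuth token
+        # endpoint, saving a network round trip. The hasattr check below keeps
+        # this compatible with older google-auth releases without the feature.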
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.generate_text: gapic_v1.method.wrap_method(
+                self.generate_text,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.embed_text: gapic_v1.method.wrap_method(
+                self.embed_text,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    @property
+    def generate_text(
+        self,
+    ) -> Callable[
+        [text_service.GenerateTextRequest],
+        Union[
+            text_service.GenerateTextResponse,
+            Awaitable[text_service.GenerateTextResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def embed_text(
+        self,
+    ) -> Callable[
+        [text_service.EmbedTextRequest],
+        Union[
+            text_service.EmbedTextResponse, Awaitable[text_service.EmbedTextResponse]
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = ("TextServiceTransport",)
diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/grpc.py
new file mode 100644
index 000000000000..a80e4f539775
--- /dev/null
+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/grpc.py
@@ -0,0 +1,298 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import grpc  # type: ignore
+
+from google.ai.generativelanguage_v1beta2.types import text_service
+
+from .base import DEFAULT_CLIENT_INFO, TextServiceTransport
+
+
+class TextServiceGrpcTransport(TextServiceTransport):
+    """gRPC backend transport for TextService.
+
+    API for using Generative Language Models (GLMs) trained to
+    generate text.
+    Also known as Large Language Models (LLMs), these generate text
+    given an input prompt from the user.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "generativelanguage.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
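+
+        A minimal illustrative construction (``my_credentials`` is a
+        placeholder obtained elsewhere, not part of the generated code)::
+
+            transport = TextServiceGrpcTransport(credentials=my_credentials)
+            client = TextServiceClient(transport=transport)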
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], text_service.GenerateTextResponse + ]: + r"""Return a callable for the generate text method over gRPC. + + Generates a response from the model given an input + message. + + Returns: + Callable[[~.GenerateTextRequest], + ~.GenerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_text" not in self._stubs: + self._stubs["generate_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.TextService/GenerateText", + request_serializer=text_service.GenerateTextRequest.serialize, + response_deserializer=text_service.GenerateTextResponse.deserialize, + ) + return self._stubs["generate_text"] + + @property + def embed_text( + self, + ) -> Callable[[text_service.EmbedTextRequest], text_service.EmbedTextResponse]: + r"""Return a callable for the embed text method over gRPC. + + Generates an embedding from the model given an input + message. + + Returns: + Callable[[~.EmbedTextRequest], + ~.EmbedTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_text" not in self._stubs: + self._stubs["embed_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.TextService/EmbedText", + request_serializer=text_service.EmbedTextRequest.serialize, + response_deserializer=text_service.EmbedTextResponse.deserialize, + ) + return self._stubs["embed_text"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TextServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..66a1b69445aa --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/grpc_asyncio.py @@ -0,0 +1,299 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.ai.generativelanguage_v1beta2.types import text_service
+
+from .base import DEFAULT_CLIENT_INFO, TextServiceTransport
+from .grpc import TextServiceGrpcTransport
+
+
+class TextServiceGrpcAsyncIOTransport(TextServiceTransport):
+    """gRPC AsyncIO backend transport for TextService.
+
+    API for using Generative Language Models (GLMs) trained to
+    generate text.
+    Also known as Large Language Models (LLMs), these generate text
+    given an input prompt from the user.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "generativelanguage.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], Awaitable[text_service.GenerateTextResponse] + ]: + r"""Return a callable for the generate text method over gRPC. + + Generates a response from the model given an input + message. + + Returns: + Callable[[~.GenerateTextRequest], + Awaitable[~.GenerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_text" not in self._stubs: + self._stubs["generate_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.TextService/GenerateText", + request_serializer=text_service.GenerateTextRequest.serialize, + response_deserializer=text_service.GenerateTextResponse.deserialize, + ) + return self._stubs["generate_text"] + + @property + def embed_text( + self, + ) -> Callable[ + [text_service.EmbedTextRequest], Awaitable[text_service.EmbedTextResponse] + ]: + r"""Return a callable for the embed text method over gRPC. + + Generates an embedding from the model given an input + message. + + Returns: + Callable[[~.EmbedTextRequest], + Awaitable[~.EmbedTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_text" not in self._stubs: + self._stubs["embed_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta2.TextService/EmbedText", + request_serializer=text_service.EmbedTextRequest.serialize, + response_deserializer=text_service.EmbedTextResponse.deserialize, + ) + return self._stubs["embed_text"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TextServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/rest.py new file mode 100644 index 000000000000..32fa714ff4a4 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/rest.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.ai.generativelanguage_v1beta2.types import text_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TextServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TextServiceRestInterceptor: + """Interceptor for TextService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TextServiceRestTransport. + + .. code-block:: python + class MyCustomTextServiceInterceptor(TextServiceRestInterceptor): + def pre_embed_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_embed_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TextServiceRestTransport(interceptor=MyCustomTextServiceInterceptor()) + client = TextServiceClient(transport=transport) + + + """ + + def pre_embed_text( + self, + request: text_service.EmbedTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[text_service.EmbedTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for embed_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_embed_text( + self, response: text_service.EmbedTextResponse + ) -> text_service.EmbedTextResponse: + """Post-rpc interceptor for embed_text + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. + """ + return response + + def pre_generate_text( + self, + request: text_service.GenerateTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[text_service.GenerateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. 
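+
+        A sketch of a subclass hook that attaches extra request metadata
+        (the header name is illustrative only)::
+
+            def pre_generate_text(self, request, metadata):
+                metadata = tuple(metadata) + (("x-debug-trace", "on"),)
+                return request, metadata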
+ """ + return request, metadata + + def post_generate_text( + self, response: text_service.GenerateTextResponse + ) -> text_service.GenerateTextResponse: + """Post-rpc interceptor for generate_text + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TextServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TextServiceRestInterceptor + + +class TextServiceRestTransport(TextServiceTransport): + """REST backend transport for TextService. + + API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TextServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TextServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _EmbedText(TextServiceRestStub):
+        def __hash__(self):
+            return hash("EmbedText")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: text_service.EmbedTextRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> text_service.EmbedTextResponse:
+            r"""Call the embed text method over HTTP.
+
+            Args:
+                request (~.text_service.EmbedTextRequest):
+                    The request object. Request to get a text embedding from
+                    the model.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.text_service.EmbedTextResponse:
+                    The response to an EmbedTextRequest.
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1beta2/{model=models/*}:embedText",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_embed_text(request, metadata)
+            pb_request = text_service.EmbedTextRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
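+            # from_http_response maps the status code to the matching
+            # GoogleAPICallError subclass (for example 404 -> NotFound,
+            # 403 -> PermissionDenied), so callers can catch typed errors.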
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.EmbedTextResponse() + pb_resp = text_service.EmbedTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_embed_text(resp) + return resp + + class _GenerateText(TextServiceRestStub): + def __hash__(self): + return hash("GenerateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: text_service.GenerateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.GenerateTextResponse: + r"""Call the generate text method over HTTP. + + Args: + request (~.text_service.GenerateTextRequest): + The request object. Request to generate a text completion + response from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.text_service.GenerateTextResponse: + The response from the model, + including candidate completions. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{model=models/*}:generateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_text(request, metadata) + pb_request = text_service.GenerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.GenerateTextResponse() + pb_resp = text_service.GenerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_text(resp) + return resp + + @property + def embed_text( + self, + ) -> Callable[[text_service.EmbedTextRequest], text_service.EmbedTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._EmbedText(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], text_service.GenerateTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TextServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/__init__.py new file mode 100644 index 000000000000..92e79a4fb754 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/__init__.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .citation import CitationMetadata, CitationSource +from .discuss_service import ( + CountMessageTokensRequest, + CountMessageTokensResponse, + Example, + GenerateMessageRequest, + GenerateMessageResponse, + Message, + MessagePrompt, +) +from .model import Model +from .model_service import GetModelRequest, ListModelsRequest, ListModelsResponse +from .safety import ( + ContentFilter, + HarmCategory, + SafetyFeedback, + SafetyRating, + SafetySetting, +) +from .text_service import ( + Embedding, + EmbedTextRequest, + EmbedTextResponse, + GenerateTextRequest, + GenerateTextResponse, + TextCompletion, + TextPrompt, +) + +__all__ = ( + "CitationMetadata", + "CitationSource", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + "Example", + "GenerateMessageRequest", + "GenerateMessageResponse", + "Message", + "MessagePrompt", + "Model", + "GetModelRequest", + "ListModelsRequest", + "ListModelsResponse", + "ContentFilter", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + "HarmCategory", + "Embedding", + "EmbedTextRequest", + "EmbedTextResponse", + "GenerateTextRequest", + "GenerateTextResponse", + "TextCompletion", + "TextPrompt", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/citation.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/citation.py new file mode 100644 index 000000000000..193ca7b5683b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/citation.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+__protobuf__ = proto.module(
+ package="google.ai.generativelanguage.v1beta2",
+ manifest={
+ "CitationMetadata",
+ "CitationSource",
+ },
+)
+
+
+class CitationMetadata(proto.Message):
+ r"""A collection of source attributions for a piece of content.
+
+ Attributes:
+ citation_sources (MutableSequence[google.ai.generativelanguage_v1beta2.types.CitationSource]):
+ Citations to sources for a specific response.
+ """
+
+ citation_sources: MutableSequence["CitationSource"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message="CitationSource",
+ )
+
+
+class CitationSource(proto.Message):
+ r"""A citation to a source for a portion of a specific response.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ start_index (int):
+ Optional. Start of the segment of the response
+ that is attributed to this source.
+
+ Index indicates the start of the segment,
+ measured in bytes.
+
+ This field is a member of `oneof`_ ``_start_index``.
+ end_index (int):
+ Optional. End of the attributed segment,
+ exclusive.
+
+ This field is a member of `oneof`_ ``_end_index``.
+ uri (str):
+ Optional. URI that is attributed as a source
+ for a portion of the text.
+
+ This field is a member of `oneof`_ ``_uri``.
+ license_ (str):
+ Optional. License for the GitHub project that
+ is attributed as a source for this segment.
+
+ License info is required for code citations.
+
+ This field is a member of `oneof`_ ``_license``.
+ """
+
+ start_index: int = proto.Field(
+ proto.INT32,
+ number=1,
+ optional=True,
+ )
+ end_index: int = proto.Field(
+ proto.INT32,
+ number=2,
+ optional=True,
+ )
+ uri: str = proto.Field(
+ proto.STRING,
+ number=3,
+ optional=True,
+ )
+ license_: str = proto.Field(
+ proto.STRING,
+ number=4,
+ optional=True,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/discuss_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/discuss_service.py
new file mode 100644
index 000000000000..b4b64e6f03ea
--- /dev/null
+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/discuss_service.py
@@ -0,0 +1,357 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1beta2.types import citation, safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta2", + manifest={ + "GenerateMessageRequest", + "GenerateMessageResponse", + "Message", + "MessagePrompt", + "Example", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + }, +) + + +class GenerateMessageRequest(proto.Message): + r"""Request to generate a message response from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + prompt (google.ai.generativelanguage_v1beta2.types.MessagePrompt): + Required. The structured textual input given + to the model as a prompt. + Given a + prompt, the model will return what it predicts + is the next message in the discussion. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This field is a member of `oneof`_ ``_temperature``. + candidate_count (int): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If unset, + this will default to ``1``. + + This field is a member of `oneof`_ ``_candidate_count``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. + + This field is a member of `oneof`_ ``_top_k``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "MessagePrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="MessagePrompt", + ) + temperature: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + candidate_count: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=6, + optional=True, + ) + + +class GenerateMessageResponse(proto.Message): + r"""The response from the model. + This includes candidate messages and + conversation history in the form of chronologically-ordered + messages. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta2.types.Message]): + Candidate response messages from the model. + messages (MutableSequence[google.ai.generativelanguage_v1beta2.types.Message]): + The conversation history used by the model. + filters (MutableSequence[google.ai.generativelanguage_v1beta2.types.ContentFilter]): + A set of content filtering metadata for the prompt and + response text. 
+ + This indicates which ``SafetyCategory``\ (s) blocked a + candidate from this response, the lowest ``HarmProbability`` + that triggered a block, and the HarmThreshold setting for + that category. This indicates the smallest change to the + ``SafetySettings`` that would be necessary to unblock at + least 1 response. + + The blocking is configured by the ``SafetySettings`` in the + request (or the default ``SafetySettings`` of the API). + """ + + candidates: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Message", + ) + messages: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Message", + ) + filters: MutableSequence[safety.ContentFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.ContentFilter, + ) + + +class Message(proto.Message): + r"""The base unit of structured text. + + A ``Message`` includes an ``author`` and the ``content`` of the + ``Message``. + + The ``author`` is used to tag messages when they are fed to the + model as text. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + author (str): + Optional. The author of this Message. + This serves as a key for tagging + the content of this Message when it is fed to + the model as text. + The author can be any alphanumeric string. + content (str): + Required. The text content of the structured ``Message``. + citation_metadata (google.ai.generativelanguage_v1beta2.types.CitationMetadata): + Output only. Citation information for model-generated + ``content`` in this ``Message``. + + If this ``Message`` was generated as output from the model, + this field may be populated with attribution information for + any text included in the ``content``. This field is used + only on output. + + This field is a member of `oneof`_ ``_citation_metadata``. + """ + + author: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=citation.CitationMetadata, + ) + + +class MessagePrompt(proto.Message): + r"""All of the structured input text passed to the model as a prompt. + + A ``MessagePrompt`` contains a structured set of fields that provide + context for the conversation, examples of user input/model output + message pairs that prime the model to respond in different ways, and + the conversation history or list of messages representing the + alternating turns of the conversation between the user and the + model. + + Attributes: + context (str): + Optional. Text that should be provided to the model first to + ground the response. + + If not empty, this ``context`` will be given to the model + first before the ``examples`` and ``messages``. When using a + ``context`` be sure to provide it with every request to + maintain continuity. + + This field can be a description of your prompt to the model + to help provide context and guide the responses. Examples: + "Translate the phrase from English to French." or "Given a + statement, classify the sentiment as happy, sad or neutral." + + Anything included in this field will take precedence over + message history if the total input size exceeds the model's + ``input_token_limit`` and the input request is truncated. + examples (MutableSequence[google.ai.generativelanguage_v1beta2.types.Example]): + Optional. Examples of what the model should generate. 
+ + This includes both user input and the response that the + model should emulate. + + These ``examples`` are treated identically to conversation + messages except that they take precedence over the history + in ``messages``: If the total input size exceeds the model's + ``input_token_limit`` the input will be truncated. Items + will be dropped from ``messages`` before ``examples``. + messages (MutableSequence[google.ai.generativelanguage_v1beta2.types.Message]): + Required. A snapshot of the recent conversation history + sorted chronologically. + + Turns alternate between two authors. + + If the total input size exceeds the model's + ``input_token_limit`` the input will be truncated: The + oldest items will be dropped from ``messages``. + """ + + context: str = proto.Field( + proto.STRING, + number=1, + ) + examples: MutableSequence["Example"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Example", + ) + messages: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Message", + ) + + +class Example(proto.Message): + r"""An input/output example used to instruct the Model. + It demonstrates how the model should respond or format its + response. + + Attributes: + input (google.ai.generativelanguage_v1beta2.types.Message): + An example of an input ``Message`` from the user. + output (google.ai.generativelanguage_v1beta2.types.Message): + An example of what the model should output + given the input. + """ + + input: "Message" = proto.Field( + proto.MESSAGE, + number=1, + message="Message", + ) + output: "Message" = proto.Field( + proto.MESSAGE, + number=2, + message="Message", + ) + + +class CountMessageTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + prompt (google.ai.generativelanguage_v1beta2.types.MessagePrompt): + Required. The prompt, whose token count is to + be returned. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "MessagePrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="MessagePrompt", + ) + + +class CountMessageTokensResponse(proto.Message): + r"""A response from ``CountMessageTokens``. + + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + token_count (int): + The number of tokens that the ``model`` tokenizes the + ``prompt`` into. + + Always non-negative. + """ + + token_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/model.py new file mode 100644 index 000000000000..577b33c0e23c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/model.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+__protobuf__ = proto.module(
+ package="google.ai.generativelanguage.v1beta2",
+ manifest={
+ "Model",
+ },
+)
+
+
+class Model(proto.Message):
+ r"""Information about a Generative Language Model.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ name (str):
+ Required. The resource name of the ``Model``.
+
+ Format: ``models/{model}`` with a ``{model}`` naming
+ convention of:
+
+ - "{base_model_id}-{version}"
+
+ Examples:
+
+ - ``models/chat-bison-001``
+ base_model_id (str):
+ Required. The name of the base model; pass this to the
+ generation request.
+
+ Examples:
+
+ - ``chat-bison``
+ version (str):
+ Required. The version number of the model.
+ This represents the major version.
+ display_name (str):
+ The human-readable name of the model. E.g.
+ "Chat Bison".
+ The name can be up to 128 characters long and
+ can consist of any UTF-8 characters.
+ description (str):
+ A short description of the model.
+ input_token_limit (int):
+ Maximum number of input tokens allowed for
+ this model.
+ output_token_limit (int):
+ Maximum number of output tokens available for
+ this model.
+ supported_generation_methods (MutableSequence[str]):
+ The model's supported generation methods.
+
+ The method names are defined as Pascal case strings, such as
+ ``generateMessage``, which correspond to API methods.
+ temperature (float):
+ Controls the randomness of the output.
+
+ Values can range over ``[0.0,1.0]``, inclusive. A value
+ closer to ``1.0`` will produce responses that are more
+ varied, while a value closer to ``0.0`` will typically
+ result in less surprising responses from the model. This
+ value specifies the default to be used by the backend while
+ making the call to the model.
+
+ This field is a member of `oneof`_ ``_temperature``.
+ top_p (float):
+ For Nucleus sampling.
+
+ Nucleus sampling considers the smallest set of tokens whose
+ probability sum is at least ``top_p``. This value specifies
+ the default to be used by the backend while making the call
+ to the model.
+
+ This field is a member of `oneof`_ ``_top_p``.
+ top_k (int):
+ For Top-k sampling.
+
+ Top-k sampling considers the set of ``top_k`` most probable
+ tokens. This value specifies the default to be used by the
+ backend while making the call to the model.
+
+ This field is a member of `oneof`_ ``_top_k``.
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + base_model_id: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + input_token_limit: int = proto.Field( + proto.INT32, + number=6, + ) + output_token_limit: int = proto.Field( + proto.INT32, + number=7, + ) + supported_generation_methods: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=9, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=10, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=11, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/model_service.py new file mode 100644 index 000000000000..b44bab1c8476 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/model_service.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1beta2.types import model + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta2", + manifest={ + "GetModelRequest", + "ListModelsRequest", + "ListModelsResponse", + }, +) + + +class GetModelRequest(proto.Message): + r"""Request for getting information about a specific Model. + + Attributes: + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListModelsRequest(proto.Message): + r"""Request for listing all Models. + + Attributes: + page_size (int): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at most + 50 models will be returned per page. This method returns at + most 1000 models per page, even if you pass a larger + page_size. + page_token (str): + A page token, received from a previous ``ListModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the page + token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListModelsResponse(proto.Message): + r"""Response from ``ListModel`` containing a paginated list of Models. 
+
+ Attributes:
+ models (MutableSequence[google.ai.generativelanguage_v1beta2.types.Model]):
+ The returned Models.
+ next_page_token (str):
+ A token, which can be sent as ``page_token`` to retrieve the
+ next page.
+
+ If this field is omitted, there are no more pages.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ models: MutableSequence[model.Model] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message=model.Model,
+ )
+ next_page_token: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/safety.py
new file mode 100644
index 000000000000..5eafbfa364eb
--- /dev/null
+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/safety.py
@@ -0,0 +1,247 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+__protobuf__ = proto.module(
+ package="google.ai.generativelanguage.v1beta2",
+ manifest={
+ "HarmCategory",
+ "ContentFilter",
+ "SafetyFeedback",
+ "SafetyRating",
+ "SafetySetting",
+ },
+)
+
+
+class HarmCategory(proto.Enum):
+ r"""The category of a rating.
+ These categories cover various kinds of harms that developers
+ may wish to adjust.
+
+ Values:
+ HARM_CATEGORY_UNSPECIFIED (0):
+ Category is unspecified.
+ HARM_CATEGORY_DEROGATORY (1):
+ Negative or harmful comments targeting
+ identity and/or protected attribute.
+ HARM_CATEGORY_TOXICITY (2):
+ Content that is rude, disrespectful, or
+ profane.
+ HARM_CATEGORY_VIOLENCE (3):
+ Describes scenarios depicting violence against
+ an individual or group, or general descriptions
+ of gore.
+ HARM_CATEGORY_SEXUAL (4):
+ Contains references to sexual acts or other
+ lewd content.
+ HARM_CATEGORY_MEDICAL (5):
+ Promotes unchecked medical advice.
+ HARM_CATEGORY_DANGEROUS (6):
+ Dangerous content that promotes, facilitates,
+ or encourages harmful acts.
+ """
+ HARM_CATEGORY_UNSPECIFIED = 0
+ HARM_CATEGORY_DEROGATORY = 1
+ HARM_CATEGORY_TOXICITY = 2
+ HARM_CATEGORY_VIOLENCE = 3
+ HARM_CATEGORY_SEXUAL = 4
+ HARM_CATEGORY_MEDICAL = 5
+ HARM_CATEGORY_DANGEROUS = 6
+
+
+class ContentFilter(proto.Message):
+ r"""Content filtering metadata associated with processing a
+ single request.
+ ContentFilter contains a reason and an optional supporting
+ string. The reason may be unspecified.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ reason (google.ai.generativelanguage_v1beta2.types.ContentFilter.BlockedReason):
+ The reason content was blocked during request
+ processing.
+ message (str):
+ A string that describes the filtering
+ behavior in more detail.
+
+ This field is a member of `oneof`_ ``_message``.
+ """ + + class BlockedReason(proto.Enum): + r"""A list of reasons why content may have been blocked. + + Values: + BLOCKED_REASON_UNSPECIFIED (0): + A blocked reason was not specified. + SAFETY (1): + Content was blocked by safety settings. + OTHER (2): + Content was blocked, but the reason is + uncategorized. + """ + BLOCKED_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + + reason: BlockedReason = proto.Field( + proto.ENUM, + number=1, + enum=BlockedReason, + ) + message: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class SafetyFeedback(proto.Message): + r"""Safety feedback for an entire request. + This field is populated if content in the input and/or response + is blocked due to safety settings. SafetyFeedback may not exist + for every HarmCategory. Each SafetyFeedback will return the + safety settings used by the request as well as the lowest + HarmProbability that should be allowed in order to return a + result. + + Attributes: + rating (google.ai.generativelanguage_v1beta2.types.SafetyRating): + Safety rating evaluated from content. + setting (google.ai.generativelanguage_v1beta2.types.SafetySetting): + Safety settings applied to the request. + """ + + rating: "SafetyRating" = proto.Field( + proto.MESSAGE, + number=1, + message="SafetyRating", + ) + setting: "SafetySetting" = proto.Field( + proto.MESSAGE, + number=2, + message="SafetySetting", + ) + + +class SafetyRating(proto.Message): + r"""Safety rating for a piece of content. + The safety rating contains the category of harm and the harm + probability level in that category for a piece of content. + Content is classified for safety across a number of harm + categories and the probability of the harm classification is + included here. + + Attributes: + category (google.ai.generativelanguage_v1beta2.types.HarmCategory): + Required. The category for this rating. + probability (google.ai.generativelanguage_v1beta2.types.SafetyRating.HarmProbability): + Required. The probability of harm for this + content. + """ + + class HarmProbability(proto.Enum): + r"""The probability that a piece of content is harmful. + The classification system gives the probability of the content + being unsafe. This does not indicate the severity of harm for a + piece of content. + + Values: + HARM_PROBABILITY_UNSPECIFIED (0): + Probability is unspecified. + NEGLIGIBLE (1): + Content has a negligible chance of being + unsafe. + LOW (2): + Content has a low chance of being unsafe. + MEDIUM (3): + Content has a medium chance of being unsafe. + HIGH (4): + Content has a high chance of being unsafe. + """ + HARM_PROBABILITY_UNSPECIFIED = 0 + NEGLIGIBLE = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + + category: "HarmCategory" = proto.Field( + proto.ENUM, + number=3, + enum="HarmCategory", + ) + probability: HarmProbability = proto.Field( + proto.ENUM, + number=4, + enum=HarmProbability, + ) + + +class SafetySetting(proto.Message): + r"""Safety setting, affecting the safety-blocking behavior. + Passing a safety setting for a category changes the allowed + proability that content is blocked. + + Attributes: + category (google.ai.generativelanguage_v1beta2.types.HarmCategory): + Required. The category for this setting. + threshold (google.ai.generativelanguage_v1beta2.types.SafetySetting.HarmBlockThreshold): + Required. Controls the probability threshold + at which harm is blocked. + """ + + class HarmBlockThreshold(proto.Enum): + r"""Block at and beyond a specified harm probability. 
+
+ Values:
+ HARM_BLOCK_THRESHOLD_UNSPECIFIED (0):
+ Threshold is unspecified.
+ BLOCK_LOW_AND_ABOVE (1):
+ Content with NEGLIGIBLE will be allowed.
+ BLOCK_MEDIUM_AND_ABOVE (2):
+ Content with NEGLIGIBLE and LOW will be
+ allowed.
+ BLOCK_ONLY_HIGH (3):
+ Content with NEGLIGIBLE, LOW, and MEDIUM will
+ be allowed.
+ BLOCK_NONE (4):
+ All content will be allowed.
+ """
+ HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0
+ BLOCK_LOW_AND_ABOVE = 1
+ BLOCK_MEDIUM_AND_ABOVE = 2
+ BLOCK_ONLY_HIGH = 3
+ BLOCK_NONE = 4
+
+ category: "HarmCategory" = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum="HarmCategory",
+ )
+ threshold: HarmBlockThreshold = proto.Field(
+ proto.ENUM,
+ number=4,
+ enum=HarmBlockThreshold,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/text_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/text_service.py
new file mode 100644
index 000000000000..15751c6b84b6
--- /dev/null
+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/types/text_service.py
@@ -0,0 +1,329 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+from google.ai.generativelanguage_v1beta2.types import citation, safety
+
+__protobuf__ = proto.module(
+ package="google.ai.generativelanguage.v1beta2",
+ manifest={
+ "GenerateTextRequest",
+ "GenerateTextResponse",
+ "TextPrompt",
+ "TextCompletion",
+ "EmbedTextRequest",
+ "EmbedTextResponse",
+ "Embedding",
+ },
+)
+
+
+class GenerateTextRequest(proto.Message):
+ r"""Request to generate a text completion response from the
+ model.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ model (str):
+ Required. The model name to use with the
+ format name=models/{model}.
+ prompt (google.ai.generativelanguage_v1beta2.types.TextPrompt):
+ Required. The free-form input text given to
+ the model as a prompt.
+ Given a prompt, the model will generate a
+ TextCompletion response it predicts as the
+ completion of the input text.
+ temperature (float):
+ Controls the randomness of the output. Note: The default
+ value varies by model, see the ``Model.temperature``
+ attribute of the ``Model`` returned by the ``getModel``
+ function.
+
+ Values can range from [0.0,1.0], inclusive. A value closer
+ to 1.0 will produce responses that are more varied and
+ creative, while a value closer to 0.0 will typically result
+ in more straightforward responses from the model.
+
+ This field is a member of `oneof`_ ``_temperature``.
+ candidate_count (int):
+ Number of generated responses to return.
+
+ This value must be between [1, 8], inclusive. If unset, this
+ will default to 1.
+
+ This field is a member of `oneof`_ ``_candidate_count``.
+ max_output_tokens (int):
+ The maximum number of tokens to include in a
+ candidate.
+ If unset, this will default to 64.
+
+ This field is a member of `oneof`_ ``_max_output_tokens``.
+ top_p (float):
+ The maximum cumulative probability of tokens to consider
+ when sampling.
+
+ The model uses combined Top-k and nucleus sampling.
+
+ Tokens are sorted based on their assigned probabilities so
+ that only the most likely tokens are considered. Top-k
+ sampling directly limits the maximum number of tokens to
+ consider, while Nucleus sampling limits the number of tokens
+ based on the cumulative probability.
+
+ Note: The default value varies by model, see the
+ ``Model.top_p`` attribute of the ``Model`` returned by the
+ ``getModel`` function.
+
+ This field is a member of `oneof`_ ``_top_p``.
+ top_k (int):
+ The maximum number of tokens to consider when sampling.
+
+ The model uses combined Top-k and nucleus sampling.
+
+ Top-k sampling considers the set of ``top_k`` most probable
+ tokens. Defaults to 40.
+
+ Note: The default value varies by model, see the
+ ``Model.top_k`` attribute of the ``Model`` returned by the
+ ``getModel`` function.
+
+ This field is a member of `oneof`_ ``_top_k``.
+ safety_settings (MutableSequence[google.ai.generativelanguage_v1beta2.types.SafetySetting]):
+ A list of unique ``SafetySetting`` instances for blocking
+ unsafe content that will be enforced on the
+ ``GenerateTextRequest.prompt`` and
+ ``GenerateTextResponse.candidates``. There should not be
+ more than one setting for each ``SafetyCategory`` type. The
+ API will block any prompts and responses that fail to meet
+ the thresholds set by these settings. This list overrides
+ the default settings for each ``SafetyCategory`` specified
+ in the safety_settings. If there is no ``SafetySetting`` for
+ a given ``SafetyCategory`` provided in the list, the API
+ will use the default safety setting for that category.
+ stop_sequences (MutableSequence[str]):
+ The set of character sequences (up to 5) that
+ will stop output generation. If specified, the
+ API will stop at the first appearance of a stop
+ sequence. The stop sequence will not be included
+ as part of the response.
+ """
+
+ model: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ prompt: "TextPrompt" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message="TextPrompt",
+ )
+ temperature: float = proto.Field(
+ proto.FLOAT,
+ number=3,
+ optional=True,
+ )
+ candidate_count: int = proto.Field(
+ proto.INT32,
+ number=4,
+ optional=True,
+ )
+ max_output_tokens: int = proto.Field(
+ proto.INT32,
+ number=5,
+ optional=True,
+ )
+ top_p: float = proto.Field(
+ proto.FLOAT,
+ number=6,
+ optional=True,
+ )
+ top_k: int = proto.Field(
+ proto.INT32,
+ number=7,
+ optional=True,
+ )
+ safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=8,
+ message=safety.SafetySetting,
+ )
+ stop_sequences: MutableSequence[str] = proto.RepeatedField(
+ proto.STRING,
+ number=9,
+ )
+
+
+class GenerateTextResponse(proto.Message):
+ r"""The response from the model, including candidate completions.
+
+ Attributes:
+ candidates (MutableSequence[google.ai.generativelanguage_v1beta2.types.TextCompletion]):
+ Candidate responses from the model.
+ filters (MutableSequence[google.ai.generativelanguage_v1beta2.types.ContentFilter]):
+ A set of content filtering metadata for the prompt and
+ response text.
+
+ This indicates which ``SafetyCategory``\ (s) blocked a
+ candidate from this response, the lowest ``HarmProbability``
+ that triggered a block, and the HarmThreshold setting for
+ that category. This indicates the smallest change to the
+ ``SafetySettings`` that would be necessary to unblock at
+ least 1 response.
+
+ The blocking is configured by the ``SafetySettings`` in the
+ request (or the default ``SafetySettings`` of the API).
+ safety_feedback (MutableSequence[google.ai.generativelanguage_v1beta2.types.SafetyFeedback]):
+ Returns any safety feedback related to
+ content filtering.
+ """
+
+ candidates: MutableSequence["TextCompletion"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message="TextCompletion",
+ )
+ filters: MutableSequence[safety.ContentFilter] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message=safety.ContentFilter,
+ )
+ safety_feedback: MutableSequence[safety.SafetyFeedback] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=4,
+ message=safety.SafetyFeedback,
+ )
+
+
+class TextPrompt(proto.Message):
+ r"""Text given to the model as a prompt.
+ The Model will use this TextPrompt to generate a text
+ completion.
+
+ Attributes:
+ text (str):
+ Required. The prompt text.
+ """
+
+ text: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class TextCompletion(proto.Message):
+ r"""Output text returned from a model.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ output (str):
+ Output only. The generated text returned from
+ the model.
+ safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta2.types.SafetyRating]):
+ Ratings for the safety of a response.
+ There is at most one rating per category.
+ citation_metadata (google.ai.generativelanguage_v1beta2.types.CitationMetadata):
+ Output only. Citation information for model-generated
+ ``output`` in this ``TextCompletion``.
+
+ This field may be populated with attribution information for
+ any text included in the ``output``.
+
+ This field is a member of `oneof`_ ``_citation_metadata``.
+ """
+
+ output: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=2,
+ message=safety.SafetyRating,
+ )
+ citation_metadata: citation.CitationMetadata = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ optional=True,
+ message=citation.CitationMetadata,
+ )
+
+
+class EmbedTextRequest(proto.Message):
+ r"""Request to get a text embedding from the model.
+
+ Attributes:
+ model (str):
+ Required. The model name to use with the
+ format model=models/{model}.
+ text (str):
+ Required. The free-form input text that the
+ model will turn into an embedding.
+ """
+
+ model: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ text: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class EmbedTextResponse(proto.Message):
+ r"""The response to an EmbedTextRequest.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ embedding (google.ai.generativelanguage_v1beta2.types.Embedding):
+ Output only. The embedding generated from the
+ input text.
+
+ This field is a member of `oneof`_ ``_embedding``.
+ """
+
+ embedding: "Embedding" = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ optional=True,
+ message="Embedding",
+ )
+
+
+class Embedding(proto.Message):
+ r"""A list of floats representing the embedding.
+ + Attributes: + value (MutableSequence[float]): + The embedding values. + """ + + value: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/mypy.ini b/packages/google-ai-generativelanguage/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-ai-generativelanguage/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-ai-generativelanguage/noxfile.py b/packages/google-ai-generativelanguage/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-ai-generativelanguage/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
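+ # (For example, CI environments without service credentials can export
+ # RUN_SYSTEM_TESTS=false to skip the whole session.)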
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
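+ # Install the pinned releases from that constraints file first; only the
+ # packages in `prerel_deps` below are then upgraded to prerelease versions.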
+ with open(
+ CURRENT_DIRECTORY
+ / "testing"
+ / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+ encoding="utf-8",
+ ) as constraints_file:
+ constraints_text = constraints_file.read()
+
+ # Ignore leading whitespace and comment lines.
+ constraints_deps = [
+ match.group(1)
+ for match in re.finditer(
+ r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+ )
+ ]
+
+ session.install(*constraints_deps)
+
+ prerel_deps = [
+ "protobuf",
+ # dependency of grpc
+ "six",
+ "googleapis-common-protos",
+ # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163
+ "grpcio!=1.52.0rc1",
+ "grpcio-status",
+ "google-api-core",
+ "proto-plus",
+ "google-cloud-testutils",
+ # dependencies of google-cloud-testutils
+ "click",
+ ]
+
+ for dep in prerel_deps:
+ session.install("--pre", "--no-deps", "--upgrade", dep)
+
+ # Remaining dependencies
+ other_deps = [
+ "requests",
+ "google-auth",
+ ]
+ session.install(*other_deps)
+
+ # Print out prerelease package versions
+ session.run(
+ "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+ )
+ session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+ session.run("py.test", "tests/unit")
+
+ system_test_path = os.path.join("tests", "system.py")
+ system_test_folder_path = os.path.join("tests", "system")
+
+ # Only run system tests if found.
+ if os.path.exists(system_test_path):
+ session.run(
+ "py.test",
+ "--verbose",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
+ if os.path.exists(system_test_folder_path):
+ session.run(
+ "py.test",
+ "--verbose",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
diff --git a/packages/google-ai-generativelanguage/renovate.json b/packages/google-ai-generativelanguage/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-ai-generativelanguage/renovate.json
@@ -0,0 +1,12 @@
+{
+ "extends": [
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
+ ],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+ "pip_requirements": {
+ "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+ }
+}
diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_async.py
new file mode 100644
index 000000000000..716d9240f7c0
--- /dev/null
+++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code.
DO NOT EDIT! +# +# Snippet for CountMessageTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_DiscussService_CountMessageTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +async def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_message_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_DiscussService_CountMessageTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_sync.py new file mode 100644 index 000000000000..2c626c0c0687 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountMessageTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_DiscussService_CountMessageTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_message_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_DiscussService_CountMessageTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_generate_message_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_generate_message_async.py new file mode 100644 index 000000000000..38b7156f7441 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_generate_message_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_DiscussService_GenerateMessage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +async def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_message(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_DiscussService_GenerateMessage_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_generate_message_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_generate_message_sync.py new file mode 100644 index 000000000000..d7d979d50c79 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_discuss_service_generate_message_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_DiscussService_GenerateMessage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta2.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta2.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_message(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_DiscussService_GenerateMessage_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_get_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_get_model_async.py new file mode 100644 index 000000000000..528195255b8f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_get_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_ModelService_GetModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +async def sample_get_model(): + # Create a client + client = generativelanguage_v1beta2.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_ModelService_GetModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_get_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_get_model_sync.py new file mode 100644 index 000000000000..e0b5b79f63b5 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_get_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_ModelService_GetModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +def sample_get_model(): + # Create a client + client = generativelanguage_v1beta2.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_ModelService_GetModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_list_models_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_list_models_async.py new file mode 100644 index 000000000000..439465f84b67 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_list_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_ModelService_ListModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.ai import generativelanguage_v1beta2
+
+
+async def sample_list_models():
+    # Create a client
+    client = generativelanguage_v1beta2.ModelServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = generativelanguage_v1beta2.ListModelsRequest(
+    )
+
+    # Make the request
+    page_result = await client.list_models(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END generativelanguage_v1beta2_generated_ModelService_ListModels_async]
diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_list_models_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_list_models_sync.py
new file mode 100644
index 000000000000..d98bf0372d8c
--- /dev/null
+++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_model_service_list_models_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListModels
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-ai-generativelanguage
+
+
+# [START generativelanguage_v1beta2_generated_ModelService_ListModels_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +def sample_list_models(): + # Create a client + client = generativelanguage_v1beta2.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta2_generated_ModelService_ListModels_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_embed_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_embed_text_async.py new file mode 100644 index 000000000000..5e182bcf6869 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_embed_text_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_TextService_EmbedText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +async def sample_embed_text(): + # Create a client + client = generativelanguage_v1beta2.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.EmbedTextRequest( + model="model_value", + text="text_value", + ) + + # Make the request + response = await client.embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_TextService_EmbedText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_embed_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_embed_text_sync.py new file mode 100644 index 000000000000..607685ec8c28 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_embed_text_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_TextService_EmbedText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +def sample_embed_text(): + # Create a client + client = generativelanguage_v1beta2.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta2.EmbedTextRequest( + model="model_value", + text="text_value", + ) + + # Make the request + response = client.embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_TextService_EmbedText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_generate_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_generate_text_async.py new file mode 100644 index 000000000000..ebb469746a09 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_generate_text_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_TextService_GenerateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +async def sample_generate_text(): + # Create a client + client = generativelanguage_v1beta2.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta2.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_TextService_GenerateText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_generate_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_generate_text_sync.py new file mode 100644 index 000000000000..c23f3a51e691 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta2_generated_text_service_generate_text_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta2_generated_TextService_GenerateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta2 + + +def sample_generate_text(): + # Create a client + client = generativelanguage_v1beta2.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta2.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta2.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta2_generated_TextService_GenerateText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json new file mode 100644 index 000000000000..3f34baddce85 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -0,0 +1,1093 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.ai.generativelanguage.v1beta2", + "version": "v1beta2" + } + ], + "language": "PYTHON", + "name": "google-ai-generativelanguage", + "version": "0.2.1" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.DiscussServiceAsyncClient", + "shortName": "DiscussServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.DiscussServiceAsyncClient.count_message_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService.CountMessageTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "CountMessageTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.CountMessageTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta2.types.MessagePrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.CountMessageTokensResponse", + "shortName": "count_message_tokens" + }, + "description": "Sample for CountMessageTokens", + "file": "generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_DiscussService_CountMessageTokens_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ai.generativelanguage_v1beta2.DiscussServiceClient", + "shortName": "DiscussServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.DiscussServiceClient.count_message_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService.CountMessageTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "CountMessageTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.CountMessageTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta2.types.MessagePrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.CountMessageTokensResponse", + "shortName": "count_message_tokens" + }, + "description": "Sample for CountMessageTokens", + "file": "generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_DiscussService_CountMessageTokens_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_discuss_service_count_message_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.DiscussServiceAsyncClient", + "shortName": "DiscussServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.DiscussServiceAsyncClient.generate_message", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService.GenerateMessage", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "GenerateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.GenerateMessageRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta2.types.MessagePrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.GenerateMessageResponse", + "shortName": "generate_message" + }, + "description": "Sample for GenerateMessage", + "file": "generativelanguage_v1beta2_generated_discuss_service_generate_message_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_DiscussService_GenerateMessage_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + 
"end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_discuss_service_generate_message_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.DiscussServiceClient", + "shortName": "DiscussServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.DiscussServiceClient.generate_message", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService.GenerateMessage", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "GenerateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.GenerateMessageRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta2.types.MessagePrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.GenerateMessageResponse", + "shortName": "generate_message" + }, + "description": "Sample for GenerateMessage", + "file": "generativelanguage_v1beta2_generated_discuss_service_generate_message_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_DiscussService_GenerateMessage_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_discuss_service_generate_message_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceAsyncClient.get_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": 
"generativelanguage_v1beta2_generated_model_service_get_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_ModelService_GetModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_model_service_get_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceClient.get_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "generativelanguage_v1beta2_generated_model_service_get_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_ModelService_GetModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_model_service_get_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceAsyncClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService.ListModels", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.services.model_service.pagers.ListModelsAsyncPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": 
"generativelanguage_v1beta2_generated_model_service_list_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_ModelService_ListModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_model_service_list_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.ModelServiceClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService.ListModels", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.services.model_service.pagers.ListModelsPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "generativelanguage_v1beta2_generated_model_service_list_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_ModelService_ListModels_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_model_service_list_models_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceAsyncClient.embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService.EmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService", + "shortName": "TextService" + }, + "shortName": "EmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.EmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "text", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.EmbedTextResponse", + "shortName": "embed_text" + }, + 
"description": "Sample for EmbedText", + "file": "generativelanguage_v1beta2_generated_text_service_embed_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_TextService_EmbedText_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_text_service_embed_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceClient.embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService.EmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService", + "shortName": "TextService" + }, + "shortName": "EmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.EmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "text", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.EmbedTextResponse", + "shortName": "embed_text" + }, + "description": "Sample for EmbedText", + "file": "generativelanguage_v1beta2_generated_text_service_embed_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_TextService_EmbedText_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_text_service_embed_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceAsyncClient.generate_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService.GenerateText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService", + "shortName": "TextService" + }, + "shortName": "GenerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.GenerateTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta2.types.TextPrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "max_output_tokens", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.GenerateTextResponse", + "shortName": "generate_text" + }, + "description": "Sample for GenerateText", + "file": "generativelanguage_v1beta2_generated_text_service_generate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_TextService_GenerateText_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_text_service_generate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta2.TextServiceClient.generate_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService.GenerateText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta2.TextService", + "shortName": "TextService" + }, + "shortName": "GenerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta2.types.GenerateTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta2.types.TextPrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "max_output_tokens", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta2.types.GenerateTextResponse", + "shortName": "generate_text" + }, + "description": "Sample for GenerateText", + "file": "generativelanguage_v1beta2_generated_text_service_generate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta2_generated_TextService_GenerateText_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta2_generated_text_service_generate_text_sync.py" + } + ] +} diff --git a/packages/google-ai-generativelanguage/scripts/decrypt-secrets.sh b/packages/google-ai-generativelanguage/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-ai-generativelanguage/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta2_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta2_keywords.py new file mode 100644 index 000000000000..e773a3138225 --- /dev/null +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta2_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
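The decrypt-secrets.sh script above pulls three test fixtures from Secret Manager with gcloud, refusing to overwrite files that already exist. For context, a hedged sketch of the same "access latest version" fetch using the Python client instead; the google-cloud-secret-manager dependency and the example argument values are assumptions, not part of this diff:

```python
# Hedged sketch, not part of the diff: the fetch that decrypt-secrets.sh
# performs via gcloud, done with the Secret Manager Python client. Assumes
# google-cloud-secret-manager is installed and credentials are configured.
from google.cloud import secretmanager


def fetch_secret(project_id: str, secret_id: str) -> bytes:
    client = secretmanager.SecretManagerServiceClient()
    name = f"projects/{project_id}/secrets/{secret_id}/versions/latest"
    response = client.access_secret_version(request={"name": name})
    return response.payload.data


# Mirrors the script's default project and one of its secrets, e.g.:
# fetch_secret("cloud-devrel-kokoro-resources", "python-docs-samples-test-env")
```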
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class generativelanguageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'count_message_tokens': ('model', 'prompt', ), + 'embed_text': ('model', 'text', ), + 'generate_message': ('model', 'prompt', 'temperature', 'candidate_count', 'top_p', 'top_k', ), + 'generate_text': ('model', 'prompt', 'temperature', 'candidate_count', 'max_output_tokens', 'top_p', 'top_k', 'safety_settings', 'stop_sequences', ), + 'get_model': ('name', ), + 'list_models': ('page_size', 'page_token', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=generativelanguageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the generativelanguage client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-ai-generativelanguage/setup.py b/packages/google-ai-generativelanguage/setup.py new file mode 100644 index 000000000000..92e668299278 --- /dev/null +++ b/packages/google-ai-generativelanguage/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
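The fixup script that ends above rewrites call sites that pass flattened positional arguments into the request-object form used by the generated samples earlier in this diff; it is normally invoked as `python fixup_generativelanguage_v1beta2_keywords.py -d IN_DIR -o OUT_DIR`. A small sketch of what its transformer does to a single call, assuming the script is importable from the working directory, libcst is installed, and the call site shown is purely illustrative:

```python
# Hypothetical driver, not part of the diff: feed one call site through the
# transformer class defined in the fixup script above.
import libcst as cst

from fixup_generativelanguage_v1beta2_keywords import (
    generativelanguageCallTransformer,
)

# A pre-fix call with flattened arguments (names are illustrative).
SRC = 'client.generate_text("models/text-bison-001", prompt, temperature=0.7)\n'

module = cst.parse_module(SRC)
print(module.visit(generativelanguageCallTransformer()).code)
# Expected shape (exact formatting may differ):
# client.generate_text(request={'model': "models/text-bison-001",
#                               'prompt': prompt, 'temperature': 0.7})
```

Control parameters (retry, timeout, metadata) are deliberately left as keyword arguments rather than folded into the request dict, matching CTRL_PARAMS in the transformer.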
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-ai-generativelanguage" + + +description = "Google Ai Generativelanguage API client library" + +version = {} +with open( + os.path.join(package_root, "google/ai/generativelanguage/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.ai"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-ai-generativelanguage/testing/.gitignore b/packages/google-ai-generativelanguage/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-ai-generativelanguage/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-ai-generativelanguage/testing/constraints-3.10.txt b/packages/google-ai-generativelanguage/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ai-generativelanguage/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ai-generativelanguage/testing/constraints-3.11.txt b/packages/google-ai-generativelanguage/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ai-generativelanguage/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
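# --- Editor's note (not part of the diff): the version lookup in setup.py
# above relies on exec() populating a dict; gapic_version.py only has to
# define __version__. A self-contained sketch, with the file contents inlined
# as a stand-in for google/ai/generativelanguage/gapic_version.py:
version = {}
gapic_version_py = '__version__ = "0.2.1"'  # stand-in for the real file
exec(gapic_version_py, version)
assert version["__version__"] == "0.2.1"

# setup.py then derives the trove classifier from the leading digit:
release_status = (
    "Development Status :: 4 - Beta"
    if version["__version__"][0] == "0"
    else "Development Status :: 5 - Production/Stable"
)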
+google-api-core +proto-plus +protobuf diff --git a/packages/google-ai-generativelanguage/testing/constraints-3.12.txt b/packages/google-ai-generativelanguage/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ai-generativelanguage/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ai-generativelanguage/testing/constraints-3.7.txt b/packages/google-ai-generativelanguage/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-ai-generativelanguage/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-ai-generativelanguage/testing/constraints-3.8.txt b/packages/google-ai-generativelanguage/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ai-generativelanguage/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ai-generativelanguage/testing/constraints-3.9.txt b/packages/google-ai-generativelanguage/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ai-generativelanguage/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ai-generativelanguage/tests/__init__.py b/packages/google-ai-generativelanguage/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/tests/unit/__init__.py b/packages/google-ai-generativelanguage/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
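# --- Editor's note (not part of the diff): constraints-3.7.txt pins each
# dependency to the exact lower bound declared in setup.py, so CI exercises
# the oldest supported versions. A small hypothetical checker for that
# invariant (not part of the repo's tooling):
import re

def lower_bounds(requirements):
    """Map distribution name -> version extracted from the '>=' specifier."""
    bounds = {}
    for req in requirements:
        name = re.match(r"[A-Za-z0-9._-]+", req).group(0)
        spec = re.search(r">=\s*([^,;\s]+)", req)
        if spec:
            bounds[name] = spec.group(1)
    return bounds

install_requires = [
    "google-api-core[grpc] >= 1.34.0, <3.0.0dev",
    "proto-plus >= 1.22.0, <2.0.0dev",
    "protobuf>=3.19.5,<5.0.0dev",
]
pins = {"google-api-core": "1.34.0", "proto-plus": "1.22.0", "protobuf": "3.19.5"}
assert lower_bounds(install_requires) == pins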
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/__init__.py b/packages/google-ai-generativelanguage/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/__init__.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py new file mode 100644 index 000000000000..ce463d8914f3 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py @@ -0,0 +1,2548 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta2.services.discuss_service import ( + DiscussServiceAsyncClient, + DiscussServiceClient, + transports, +) +from google.ai.generativelanguage_v1beta2.types import citation, discuss_service, safety + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DiscussServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DiscussServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DiscussServiceClient, "grpc"), + (DiscussServiceAsyncClient, "grpc_asyncio"), + (DiscussServiceClient, "rest"), + ], +) +def test_discuss_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DiscussServiceGrpcTransport, "grpc"), + 
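# --- Editor's note (not part of the diff): the transformation that
# test__get_default_mtls_endpoint above pins down is a pure hostname rewrite.
# This is an illustrative re-derivation of that mapping, not the client's
# actual implementation:
def to_mtls_endpoint(endpoint):
    if endpoint is None or not endpoint.endswith(".googleapis.com"):
        return endpoint  # non-Google hosts (and None) pass through unchanged
    host, _, rest = endpoint.partition(".")
    if rest.startswith("mtls."):
        return endpoint  # already an mTLS endpoint
    return f"{host}.mtls.{rest}"

assert to_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert to_mtls_endpoint("example.mtls.googleapis.com") == "example.mtls.googleapis.com"
assert to_mtls_endpoint("example.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
assert to_mtls_endpoint("api.example.com") == "api.example.com"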
(transports.DiscussServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DiscussServiceRestTransport, "rest"), + ], +) +def test_discuss_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DiscussServiceClient, "grpc"), + (DiscussServiceAsyncClient, "grpc_asyncio"), + (DiscussServiceClient, "rest"), + ], +) +def test_discuss_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_discuss_service_client_get_transport_class(): + transport = DiscussServiceClient.get_transport_class() + available_transports = [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceRestTransport, + ] + assert transport in available_transports + + transport = DiscussServiceClient.get_transport_class("grpc") + assert transport == transports.DiscussServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DiscussServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceAsyncClient), +) +def test_discuss_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DiscussServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DiscussServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
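# --- Editor's note (not part of the diff): the endpoint choice the "never" /
# "always" / unsupported cases above encode, condensed into one illustrative
# function. The generated client implements this differently, and raises
# MutualTLSChannelError rather than ValueError for an unsupported mode:
def choose_endpoint(mode, default, mtls_default, cert_available):
    # mode comes from GOOGLE_API_USE_MTLS_ENDPOINT; "auto" is the default.
    if mode == "never":
        return default
    if mode == "always":
        return mtls_default
    if mode == "auto":
        return mtls_default if cert_available else default
    raise ValueError(f"unsupported GOOGLE_API_USE_MTLS_ENDPOINT value: {mode}")

assert choose_endpoint("never", "default", "mtls", True) == "default"
assert choose_endpoint("always", "default", "mtls", False) == "mtls"
assert choose_endpoint("auto", "default", "mtls", True) == "mtls"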
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc", "true"),
+        (
+            DiscussServiceAsyncClient,
+            transports.DiscussServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc", "false"),
+        (
+            DiscussServiceAsyncClient,
+            transports.DiscussServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", "true"),
+        (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    DiscussServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(DiscussServiceClient),
+)
+@mock.patch.object(
+    DiscussServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(DiscussServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_discuss_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DiscussServiceClient, DiscussServiceAsyncClient] +) +@mock.patch.object( + DiscussServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceAsyncClient), +) +def test_discuss_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
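# --- Editor's note (not part of the diff): the client-certificate decision
# exercised by test_discuss_service_client_mtls_env_auto above, as a sketch.
# A cert source is honored only when GOOGLE_API_USE_CLIENT_CERTIFICATE is
# "true"; an explicit client_cert_source wins over the ADC default source:
def effective_cert_source(use_client_cert, explicit_source, adc_source):
    if use_client_cert != "true":
        return None
    return explicit_source or adc_source

assert effective_cert_source("false", object(), object()) is None
adc = object()
assert effective_cert_source("true", None, adc) is adc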
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), + ], +) +def test_discuss_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DiscussServiceClient, + transports.DiscussServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", None), + ], +) +def test_discuss_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_discuss_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta2.services.discuss_service.transports.DiscussServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DiscussServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DiscussServiceClient, + transports.DiscussServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_discuss_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
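# --- Editor's note (not part of the diff): what the option-plumbing tests
# above drive in normal use. A hypothetical construction; anonymous
# credentials keep the example offline, since no RPC is made at construction:
from google.api_core import client_options
from google.auth import credentials as ga_credentials

from google.ai.generativelanguage_v1beta2.services.discuss_service import (
    DiscussServiceClient,
)

client = DiscussServiceClient(
    client_options=client_options.ClientOptions(api_endpoint="squid.clam.whelk"),
    credentials=ga_credentials.AnonymousCredentials(),
)
# The default gRPC transport appends the port, so the stored host is
# "squid.clam.whelk:443".
assert client.transport._host.startswith("squid.clam.whelk")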
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.GenerateMessageRequest, + dict, + ], +) +def test_generate_message(request_type, transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + response = client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.GenerateMessageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.GenerateMessageResponse) + + +def test_generate_message_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + client.generate_message() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.GenerateMessageRequest() + + +@pytest.mark.asyncio +async def test_generate_message_async( + transport: str = "grpc_asyncio", request_type=discuss_service.GenerateMessageRequest +): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + response = await client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.GenerateMessageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.GenerateMessageResponse) + + +@pytest.mark.asyncio +async def test_generate_message_async_from_dict(): + await test_generate_message_async(request_type=dict) + + +def test_generate_message_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.GenerateMessageRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value = discuss_service.GenerateMessageResponse() + client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_message_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.GenerateMessageRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + await client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_message_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_message( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +def test_generate_message_flattened_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.asyncio +async def test_generate_message_flattened_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_message( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_message_flattened_error_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.CountMessageTokensRequest, + dict, + ], +) +def test_count_message_tokens(request_type, transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
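# --- Editor's note (not part of the diff): the two calling conventions the
# flattened tests above contrast; the model name and prompt are placeholders:
request = discuss_service.GenerateMessageRequest(
    model="models/sample1",
    prompt=discuss_service.MessagePrompt(context="context_value"),
)
# Either pass the request object:
#     client.generate_message(request)
# or pass flattened fields as keywords:
#     client.generate_message(model="models/sample1", prompt=...)
# but never both: that combination raises ValueError, which is exactly what
# the *_flattened_error tests assert.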
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + response = client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.CountMessageTokensRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +def test_count_message_tokens_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + client.count_message_tokens() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.CountMessageTokensRequest() + + +@pytest.mark.asyncio +async def test_count_message_tokens_async( + transport: str = "grpc_asyncio", + request_type=discuss_service.CountMessageTokensRequest, +): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + ) + response = await client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.CountMessageTokensRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +@pytest.mark.asyncio +async def test_count_message_tokens_async_from_dict(): + await test_count_message_tokens_async(request_type=dict) + + +def test_count_message_tokens_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.CountMessageTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value = discuss_service.CountMessageTokensResponse() + client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_message_tokens_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.CountMessageTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse() + ) + await client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_message_tokens_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_message_tokens( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + + +def test_count_message_tokens_flattened_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +@pytest.mark.asyncio +async def test_count_message_tokens_flattened_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.count_message_tokens( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_message_tokens_flattened_error_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.GenerateMessageRequest, + dict, + ], +) +def test_generate_message_rest(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_message(request) + + # Establish that the response is the type that we expect. 
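# --- Editor's note (not part of the diff): how the REST tests fake a wire
# response. proto-plus message classes expose .pb() to reach the underlying
# protobuf message, which json_format can serialize exactly as the mocked
# Session returns it:
from google.protobuf import json_format

msg = discuss_service.GenerateMessageResponse()
raw_pb = discuss_service.GenerateMessageResponse.pb(msg)  # underlying protobuf
payload = json_format.MessageToJson(raw_pb).encode("UTF-8")
# `payload` is what the tests stuff into Response._content before handing the
# fake Response back to the transport.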
+ assert isinstance(response, discuss_service.GenerateMessageResponse) + + +def test_generate_message_rest_required_fields( + request_type=discuss_service.GenerateMessageRequest, +): + transport_class = transports.DiscussServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_message(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_message_rest_unset_required_fields(): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_message._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_message_rest_interceptors(null_interceptor): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DiscussServiceRestInterceptor(), + ) + client = DiscussServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_generate_message" + ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, "pre_generate_message" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = discuss_service.GenerateMessageRequest.pb( + discuss_service.GenerateMessageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = discuss_service.GenerateMessageResponse.to_json( + discuss_service.GenerateMessageResponse() + ) + + request = discuss_service.GenerateMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discuss_service.GenerateMessageResponse() + + client.generate_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_message_rest_bad_request( + transport: str = "rest", request_type=discuss_service.GenerateMessageRequest +): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
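# --- Editor's note (not part of the diff): the pre/post hooks the interceptor
# test above patches are designed to be overridden by users. A hypothetical
# subclass; the hook signatures match what the test's mocks return:
class LoggingInterceptor(transports.DiscussServiceRestInterceptor):
    def pre_generate_message(self, request, metadata):
        # Runs before the HTTP request; must return (request, metadata).
        print("generate_message ->", metadata)
        return request, metadata

    def post_generate_message(self, response):
        # Runs after a successful response; must return the response.
        print("generate_message <-", type(response).__name__)
        return response

# Wiring it in mirrors the test:
#     transport = transports.DiscussServiceRestTransport(
#         credentials=..., interceptor=LoggingInterceptor()
#     )
#     client = DiscussServiceClient(transport=transport)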
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_message(request) + + +def test_generate_message_rest_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_message(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{model=models/*}:generateMessage" % client.transport._host, + args[1], + ) + + +def test_generate_message_rest_flattened_error(transport: str = "rest"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +def test_generate_message_rest_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.CountMessageTokensRequest, + dict, + ], +) +def test_count_message_tokens_rest(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
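# --- Editor's note (not part of the diff): path_template.validate, used by
# the flattened REST test above, checks a concrete URL against an http-rule
# pattern; {model=models/*} matches exactly one path segment after "models/":
from google.api_core import path_template

assert path_template.validate(
    "https://generativelanguage.googleapis.com/v1beta2/{model=models/*}:generateMessage",
    "https://generativelanguage.googleapis.com/v1beta2/models/sample1:generateMessage",
)
assert not path_template.validate(
    "https://generativelanguage.googleapis.com/v1beta2/{model=models/*}:generateMessage",
    "https://generativelanguage.googleapis.com/v1beta2/models/a/b:generateMessage",
)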
+ return_value = discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.count_message_tokens(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +def test_count_message_tokens_rest_required_fields( + request_type=discuss_service.CountMessageTokensRequest, +): + transport_class = transports.DiscussServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_message_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_message_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
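+            # transcode() normally matches the request against the method's
+            # http_options and returns a dict with "method", "uri", "body" and
+            # "query_params" entries; the stub below mimics that shape so the
+            # test can assert on the serialized query parameters alone.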
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = discuss_service.CountMessageTokensResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.count_message_tokens(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_count_message_tokens_rest_unset_required_fields():
+    transport = transports.DiscussServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.count_message_tokens._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "model",
+                "prompt",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_count_message_tokens_rest_interceptors(null_interceptor):
+    transport = transports.DiscussServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DiscussServiceRestInterceptor(),
+    )
+    client = DiscussServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DiscussServiceRestInterceptor, "post_count_message_tokens"
+    ) as post, mock.patch.object(
+        transports.DiscussServiceRestInterceptor, "pre_count_message_tokens"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = discuss_service.CountMessageTokensRequest.pb(
+            discuss_service.CountMessageTokensRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = discuss_service.CountMessageTokensResponse.to_json(
+            discuss_service.CountMessageTokensResponse()
+        )
+
+        request = discuss_service.CountMessageTokensRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = discuss_service.CountMessageTokensResponse()
+
+        client.count_message_tokens(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_count_message_tokens_rest_bad_request(
+    transport: str = "rest", request_type=discuss_service.CountMessageTokensRequest
+):
+    client = DiscussServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"model": "models/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.count_message_tokens(request) + + +def test_count_message_tokens_rest_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.count_message_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{model=models/*}:countMessageTokens" % client.transport._host, + args[1], + ) + + +def test_count_message_tokens_rest_flattened_error(transport: str = "rest"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +def test_count_message_tokens_rest_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DiscussServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DiscussServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + transports.DiscussServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DiscussServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DiscussServiceGrpcTransport, + ) + + +def test_discuss_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DiscussServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_discuss_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta2.services.discuss_service.transports.DiscussServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DiscussServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "generate_message", + "count_message_tokens", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_discuss_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta2.services.discuss_service.transports.DiscussServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiscussServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_discuss_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta2.services.discuss_service.transports.DiscussServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiscussServiceTransport() + adc.assert_called_once() + + +def test_discuss_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DiscussServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
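+    # (google.auth.default() returns a (credentials, project_id) tuple, which is
+    # why the mocked return value below is a two-tuple with None for the project.)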
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + transports.DiscussServiceRestTransport, + ], +) +def test_discuss_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DiscussServiceGrpcTransport, grpc_helpers), + (transports.DiscussServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_discuss_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
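+    # (client_cert_source_for_mtls is a callable returning (cert_bytes, key_bytes);
+    # the assertion below verifies those bytes reach grpc.ssl_channel_credentials.)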
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_discuss_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.DiscussServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_discuss_service_host_no_port(transport_name):
+    client = DiscussServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="generativelanguage.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "generativelanguage.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://generativelanguage.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_discuss_service_host_with_port(transport_name):
+    client = DiscussServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="generativelanguage.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "generativelanguage.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://generativelanguage.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_discuss_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DiscussServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DiscussServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.generate_message._session
+    session2 = client2.transport.generate_message._session
+    assert session1 != session2
+    session1 = client1.transport.count_message_tokens._session
+    session2 = client2.transport.count_message_tokens._session
+    assert session1 != session2
+
+
+def test_discuss_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DiscussServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_discuss_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DiscussServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DiscussServiceGrpcTransport,
+        transports.DiscussServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_discuss_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = DiscussServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = DiscussServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DiscussServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = DiscussServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DiscussServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = DiscussServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DiscussServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = DiscussServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiscussServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = DiscussServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = DiscussServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DiscussServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = DiscussServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DiscussServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DiscussServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DiscussServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport), + (DiscussServiceAsyncClient, transports.DiscussServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py new file mode 100644 index 000000000000..fd9ad7c169ab --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py @@ -0,0 +1,2594 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta2.services.model_service import ( + ModelServiceAsyncClient, + ModelServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1beta2.types import model, model_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ModelServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ModelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ModelServiceGrpcTransport, "grpc"), + (transports.ModelServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) 
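+
+    # (In generated clients, from_service_account_json is an alias of
+    # from_service_account_file, so the same mocked factory covers both calls.)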
+ + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_model_service_client_get_transport_class(): + transport = ModelServiceClient.get_transport_class() + available_transports = [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceRestTransport, + ] + assert transport in available_transports + + transport = ModelServiceClient.get_transport_class("grpc") + assert transport == transports.ModelServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +def test_model_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # an unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has an unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc", "true"),
+        (
+            ModelServiceAsyncClient,
+            transports.ModelServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc", "false"),
+        (
+            ModelServiceAsyncClient,
+            transports.ModelServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "true"),
+        (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient)
+)
+@mock.patch.object(
+    ModelServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ModelServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_model_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. The endpoint is autoswitched to the
+    # default mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client
+    # cert exists.
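+    # Summary of the cases exercised below:
+    #   GOOGLE_API_USE_CLIENT_CERTIFICATE == "false" -> DEFAULT_ENDPOINT, no cert
+    #   GOOGLE_API_USE_CLIENT_CERTIFICATE == "true" and a cert source is
+    #       available -> DEFAULT_MTLS_ENDPOINT, that cert source is used
+    #   no cert source available at all -> DEFAULT_ENDPOINT, no cert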
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ModelServiceClient, ModelServiceAsyncClient]) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +def test_model_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", None), + ], +) +def test_model_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_model_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta2.services.model_service.transports.ModelServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ModelServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_model_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
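+    # (load_credentials_from_file is mocked below, so no real file is read; the
+    # point is that file_creds, not the ADC credentials, must be the ones that
+    # reach create_channel.)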
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetModelRequest, + dict, + ], +) +def test_get_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + response = client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +def test_get_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_model), "__call__") as call: + client.get_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + +@pytest.mark.asyncio +async def test_get_model_async( + transport: str = "grpc_asyncio", request_type=model_service.GetModelRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + ) + response = await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +@pytest.mark.asyncio +async def test_get_model_async_from_dict(): + await test_get_model_async(request_type=dict) + + +def test_get_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = model.Model() + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
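+    # (GAPIC clients mirror URI path fields into the "x-goog-request-params"
+    # request metadata so the backend can route the call; the assertion below
+    # checks for exactly that header.)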
+    request = model_service.GetModelRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_model), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model())
+        await client.get_model(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_model_flattened():
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_model), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = model.Model()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_model(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_model_flattened_error():
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_model(
+            model_service.GetModelRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_model_flattened_async():
+    client = ModelServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_model), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_model(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_model_flattened_error_async():
+    client = ModelServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_model(
+            model_service.GetModelRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        model_service.ListModelsRequest,
+        dict,
+    ],
+)
+def test_list_models(request_type, transport: str = "grpc"):
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
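+    # request_type is parametrized as both the proto message class and dict,
+    # so request_type() below yields either an empty request message or an
+    # empty dict; the client accepts both forms.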
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + client.list_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + +@pytest.mark.asyncio +async def test_list_models_async( + transport: str = "grpc_asyncio", request_type=model_service.ListModelsRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_models_async_from_dict(): + await test_list_models_async(request_type=dict) + + +def test_list_models_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
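+        # Each flattened keyword argument should have been copied onto the
+        # corresponding field of the request proto captured in args[0].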
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].page_size
+        mock_val = 951
+        assert arg == mock_val
+        arg = args[0].page_token
+        mock_val = "page_token_value"
+        assert arg == mock_val
+
+
+def test_list_models_flattened_error():
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_models(
+            model_service.ListModelsRequest(),
+            page_size=951,
+            page_token="page_token_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_models_flattened_async():
+    client = ModelServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_models), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            model_service.ListModelsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_models(
+            page_size=951,
+            page_token="page_token_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].page_size
+        mock_val = 951
+        assert arg == mock_val
+        arg = args[0].page_token
+        mock_val = "page_token_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_models_flattened_error_async():
+    client = ModelServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_models(
+            model_service.ListModelsRequest(),
+            page_size=951,
+            page_token="page_token_value",
+        )
+
+
+def test_list_models_pager(transport_name: str = "grpc"):
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_models), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                    model.Model(),
+                ],
+                next_page_token="abc",
+            ),
+            model_service.ListModelsResponse(
+                models=[],
+                next_page_token="def",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                ],
+                next_page_token="ghi",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        pager = client.list_models(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, model.Model) for i in results)
+
+
+def test_list_models_pages(transport_name: str = "grpc"):
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_models), "__call__") as call:
+        # Set the response to a series of pages.
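+        # The trailing RuntimeError is a sentinel: if the pager requests a page
+        # beyond the last prepared response, the test fails loudly instead of
+        # silently consuming a MagicMock.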
+        call.side_effect = (
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                    model.Model(),
+                ],
+                next_page_token="abc",
+            ),
+            model_service.ListModelsResponse(
+                models=[],
+                next_page_token="def",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                ],
+                next_page_token="ghi",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_models(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_models_async_pager():
+    client = ModelServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                    model.Model(),
+                ],
+                next_page_token="abc",
+            ),
+            model_service.ListModelsResponse(
+                models=[],
+                next_page_token="def",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                ],
+                next_page_token="ghi",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_models(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, model.Model) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_models_async_pages():
+    client = ModelServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                    model.Model(),
+                ],
+                next_page_token="abc",
+            ),
+            model_service.ListModelsResponse(
+                models=[],
+                next_page_token="def",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                ],
+                next_page_token="ghi",
+            ),
+            model_service.ListModelsResponse(
+                models=[
+                    model.Model(),
+                    model.Model(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_models(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        model_service.GetModelRequest,
+        dict,
+    ],
+)
+def test_get_model_rest(request_type):
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "models/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
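+        # The REST tests fake one level lower than the gRPC tests: the mocked
+        # session returns a JSON-encoded proto as the HTTP response body.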
+ return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_model(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +def test_get_model_rest_required_fields(request_type=model_service.GetModelRequest): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = model.Model() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
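+            # "v1/sample_method" below is a stand-in URI, not a real route;
+            # only the resulting query params (including the implicit "$alt"
+            # setting asserted later) matter for this test.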
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = model.Model.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_model(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_model_rest_unset_required_fields():
+    transport = transports.ModelServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_model._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_model_rest_interceptors(null_interceptor):
+    transport = transports.ModelServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ModelServiceRestInterceptor(),
+    )
+    client = ModelServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ModelServiceRestInterceptor, "post_get_model"
+    ) as post, mock.patch.object(
+        transports.ModelServiceRestInterceptor, "pre_get_model"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = model.Model.to_json(model.Model())
+
+        request = model_service.GetModelRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = model.Model()
+
+        client.get_model(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_model_rest_bad_request(
+    transport: str = "rest", request_type=model_service.GetModelRequest
+):
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "models/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_model(request)
+
+
+def test_get_model_rest_flattened():
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
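+        # For the flattened REST call, the assertion below validates the
+        # request URI against the http rule template rather than inspecting
+        # a request proto.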
+ return_value = model.Model() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{name=models/*}" % client.transport._host, args[1] + ) + + +def test_get_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +def test_get_model_rest_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListModelsRequest, + dict, + ], +) +def test_list_models_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_models(request) + + # Establish that the response is the type that we expect. 
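+    # Even over REST, list methods return a pager wrapper, although only a
+    # single fake page was supplied here.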
+ assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_models_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_list_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.ListModelsRequest.pb( + model_service.ListModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = model_service.ListModelsResponse.to_json( + model_service.ListModelsResponse() + ) + + request = model_service.ListModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model_service.ListModelsResponse() + + client.list_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_models_rest_bad_request( + transport: str = "rest", request_type=model_service.ListModelsRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_models(request) + + +def test_list_models_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model_service.ListModelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + page_size=951, + page_token="page_token_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_models(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/models" % client.transport._host, args[1] + ) + + +def test_list_models_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_models_rest_pager(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(model_service.ListModelsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_models(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, model.Model) for i in results) + + pages = list(client.list_models(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ModelServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ModelServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ModelServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ModelServiceGrpcTransport, + ) + + +def test_model_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_model_service_base_transport(): + # Instantiate the base transport. 
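+    # The abstract base transport cannot normally be instantiated; its
+    # __init__ is patched out so each RPC method can be probed for
+    # NotImplementedError.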
+ with mock.patch( + "google.ai.generativelanguage_v1beta2.services.model_service.transports.ModelServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_model", + "list_models", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_model_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta2.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_model_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta2.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport() + adc.assert_called_once() + + +def test_model_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ModelServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + ], +) +def test_model_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_model_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ModelServiceGrpcTransport, grpc_helpers), + (transports.ModelServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_model_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
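+    # client_cert_source_callback returns (b"cert bytes", b"key bytes"); the
+    # test verifies both are forwarded to grpc.ssl_channel_credentials.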
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_model_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.ModelServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_model_service_host_no_port(transport_name):
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="generativelanguage.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "generativelanguage.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://generativelanguage.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_model_service_host_with_port(transport_name):
+    client = ModelServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="generativelanguage.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "generativelanguage.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://generativelanguage.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_model_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = ModelServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = ModelServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_model._session
+    session2 = client2.transport.get_model._session
+    assert session1 != session2
+    session1 = client1.transport.list_models._session
+    session2 = client2.transport.list_models._session
+    assert session1 != session2
+
+
+def test_model_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ModelServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_model_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ModelServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport],
+)
+def test_model_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = ModelServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = ModelServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ModelServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ModelServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ModelServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ModelServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ModelServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ModelServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ModelServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ModelServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ModelServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ModelServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ModelServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
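+        # Exiting the client context manager must close the transport, and
+        # only after the with-block finishes.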
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport), + (ModelServiceAsyncClient, transports.ModelServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py new file mode 100644 index 000000000000..6aadbe294ebc --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py @@ -0,0 +1,2498 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta2.services.text_service import ( + TextServiceAsyncClient, + TextServiceClient, + transports, +) +from google.ai.generativelanguage_v1beta2.types import safety, text_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TextServiceClient._get_default_mtls_endpoint(None) is None + assert ( + TextServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert TextServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextServiceClient, "grpc"), + (TextServiceAsyncClient, "grpc_asyncio"), + (TextServiceClient, "rest"), + ], +) +def test_text_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TextServiceGrpcTransport, "grpc"), + (transports.TextServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TextServiceRestTransport, "rest"), + ], +) +def test_text_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextServiceClient, "grpc"), + (TextServiceAsyncClient, "grpc_asyncio"), + (TextServiceClient, "rest"), + ], +) +def test_text_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_text_service_client_get_transport_class(): + transport = TextServiceClient.get_transport_class() + available_transports = [ + transports.TextServiceGrpcTransport, + transports.TextServiceRestTransport, + ] + assert transport in available_transports + + transport = TextServiceClient.get_transport_class("grpc") + assert transport == transports.TextServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + TextServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextServiceClient) +) +@mock.patch.object( + TextServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextServiceAsyncClient), +) +def test_text_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TextServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TextServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
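+    # ("always" forces the mTLS endpoint even when no client certificate is
+    # available; illustratively, a caller opts in by exporting
+    # GOOGLE_API_USE_MTLS_ENDPOINT=always before constructing the client.)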
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", "true"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", "false"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest", "true"), + (TextServiceClient, transports.TextServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + TextServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextServiceClient) +) +@mock.patch.object( + TextServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_text_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
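+    #
+    # A rough sketch of how a caller opts in to mTLS (values are illustrative):
+    #
+    #   os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
+    #   options = client_options.ClientOptions(
+    #       client_cert_source=client_cert_source_callback
+    #   )
+    #   client = TextServiceClient(client_options=options)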
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [TextServiceClient, TextServiceAsyncClient]) +@mock.patch.object( + TextServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextServiceClient) +) +@mock.patch.object( + TextServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextServiceAsyncClient), +) +def test_text_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
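+    # (the classmethod returns an (api_endpoint, cert_source) tuple, e.g.
+    #   endpoint, source = client_class.get_mtls_endpoint_and_cert_source(options)
+    # as exercised case by case below)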
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest"), + ], +) +def test_text_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
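+    # ("1" and "2" below are placeholder scopes; a real caller would pass
+    # OAuth scopes such as "https://www.googleapis.com/auth/cloud-platform")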
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", grpc_helpers), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest", None), + ], +) +def test_text_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_text_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta2.services.text_service.transports.TextServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TextServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", grpc_helpers), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_text_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
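+    # Both google.auth.load_credentials_from_file and google.auth.default are
+    # patched so we can verify that the file-based credentials, not ADC, are
+    # the ones handed to create_channel.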
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.GenerateTextRequest, + dict, + ], +) +def test_generate_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + response = client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.GenerateTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +def test_generate_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + client.generate_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.GenerateTextRequest() + + +@pytest.mark.asyncio +async def test_generate_text_async( + transport: str = "grpc_asyncio", request_type=text_service.GenerateTextRequest +): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + response = await client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. 
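+    # (the async surface returns a FakeUnaryUnaryCall wrapper, so we only
+    # check that at least one call was recorded on the mock)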
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.GenerateTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +@pytest.mark.asyncio +async def test_generate_text_async_from_dict(): + await test_generate_text_async(request_type=dict) + + +def test_generate_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.GenerateTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value = text_service.GenerateTextResponse() + client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_text_field_headers_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.GenerateTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + await client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_text_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_text( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
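+    # (scalar fields are compared with ==; the float fields temperature and
+    # top_p use math.isclose to tolerate proto float round-tripping)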
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].model
+    mock_val = "model_value"
+    assert arg == mock_val
+    arg = args[0].prompt
+    mock_val = text_service.TextPrompt(text="text_value")
+    assert arg == mock_val
+    assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6)
+    arg = args[0].candidate_count
+    mock_val = 1573
+    assert arg == mock_val
+    arg = args[0].max_output_tokens
+    mock_val = 1865
+    assert arg == mock_val
+    assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6)
+    arg = args[0].top_k
+    mock_val = 541
+    assert arg == mock_val
+
+
+def test_generate_text_flattened_error():
+    client = TextServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.generate_text(
+            text_service.GenerateTextRequest(),
+            model="model_value",
+            prompt=text_service.TextPrompt(text="text_value"),
+            temperature=0.1198,
+            candidate_count=1573,
+            max_output_tokens=1865,
+            top_p=0.546,
+            top_k=541,
+        )
+
+
+@pytest.mark.asyncio
+async def test_generate_text_flattened_async():
+    client = TextServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.generate_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            text_service.GenerateTextResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.generate_text(
+            model="model_value",
+            prompt=text_service.TextPrompt(text="text_value"),
+            temperature=0.1198,
+            candidate_count=1573,
+            max_output_tokens=1865,
+            top_p=0.546,
+            top_k=541,
+        )
+
+    # Establish that the underlying call was made with the expected
+    # request object values.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].model
+    mock_val = "model_value"
+    assert arg == mock_val
+    arg = args[0].prompt
+    mock_val = text_service.TextPrompt(text="text_value")
+    assert arg == mock_val
+    assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6)
+    arg = args[0].candidate_count
+    mock_val = 1573
+    assert arg == mock_val
+    arg = args[0].max_output_tokens
+    mock_val = 1865
+    assert arg == mock_val
+    assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6)
+    arg = args[0].top_k
+    mock_val = 541
+    assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_generate_text_flattened_error_async():
+    client = TextServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
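+    # (a caller must pick one style, either client.generate_text(request) or
+    # client.generate_text(model=..., prompt=...), never both at once)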
+ with pytest.raises(ValueError): + await client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.EmbedTextRequest, + dict, + ], +) +def test_embed_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.EmbedTextResponse() + response = client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.EmbedTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +def test_embed_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + client.embed_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.EmbedTextRequest() + + +@pytest.mark.asyncio +async def test_embed_text_async( + transport: str = "grpc_asyncio", request_type=text_service.EmbedTextRequest +): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + response = await client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.EmbedTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +@pytest.mark.asyncio +async def test_embed_text_async_from_dict(): + await test_embed_text_async(request_type=dict) + + +def test_embed_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.EmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.embed_text), "__call__") as call:
+        call.return_value = text_service.EmbedTextResponse()
+        client.embed_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "model=model_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_embed_text_field_headers_async():
+    client = TextServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = text_service.EmbedTextRequest()
+
+    request.model = "model_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.embed_text), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            text_service.EmbedTextResponse()
+        )
+        await client.embed_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "model=model_value",
+    ) in kw["metadata"]
+
+
+def test_embed_text_flattened():
+    client = TextServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.embed_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = text_service.EmbedTextResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.embed_text(
+            model="model_value",
+            text="text_value",
+        )
+
+    # Establish that the underlying call was made with the expected
+    # request object values.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].model
+    mock_val = "model_value"
+    assert arg == mock_val
+    arg = args[0].text
+    mock_val = "text_value"
+    assert arg == mock_val
+
+
+def test_embed_text_flattened_error():
+    client = TextServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.embed_text(
+            text_service.EmbedTextRequest(),
+            model="model_value",
+            text="text_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_embed_text_flattened_async():
+    client = TextServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.embed_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            text_service.EmbedTextResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.embed_text(
+            model="model_value",
+            text="text_value",
+        )
+
+    # Establish that the underlying call was made with the expected
+    # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].text + mock_val = "text_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_embed_text_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.GenerateTextRequest, + dict, + ], +) +def test_generate_text_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.GenerateTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +def test_generate_text_rest_required_fields( + request_type=text_service.GenerateTextRequest, +): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.GenerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
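+        # transcode() normally maps the request onto an http rule and returns
+        # a dict with "uri", "method", "query_params" and optionally "body";
+        # the stub below hands back a minimal hand-built result of that shape.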
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = text_service.GenerateTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.generate_text(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_generate_text_rest_unset_required_fields():
+    transport = transports.TextServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.generate_text._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "model",
+                "prompt",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_generate_text_rest_interceptors(null_interceptor):
+    transport = transports.TextServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.TextServiceRestInterceptor(),
+    )
+    client = TextServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.TextServiceRestInterceptor, "post_generate_text"
+    ) as post, mock.patch.object(
+        transports.TextServiceRestInterceptor, "pre_generate_text"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = text_service.GenerateTextRequest.pb(
+            text_service.GenerateTextRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = text_service.GenerateTextResponse.to_json(
+            text_service.GenerateTextResponse()
+        )
+
+        request = text_service.GenerateTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = text_service.GenerateTextResponse()
+
+        client.generate_text(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_generate_text_rest_bad_request(
+    transport: str = "rest", request_type=text_service.GenerateTextRequest
+):
+    client = TextServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"model": "models/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
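+    # (the REST transport surfaces a mocked 400 status to the caller as
+    # core_exceptions.BadRequest)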
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_text(request) + + +def test_generate_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.GenerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{model=models/*}:generateText" % client.transport._host, args[1] + ) + + +def test_generate_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +def test_generate_text_rest_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.EmbedTextRequest, + dict, + ], +) +def test_embed_text_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = text_service.EmbedTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.embed_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +def test_embed_text_rest_required_fields(request_type=text_service.EmbedTextRequest): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request_init["text"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + jsonified_request["text"] = "text_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + assert "text" in jsonified_request + assert jsonified_request["text"] == "text_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.EmbedTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
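+            # (the stubbed transcode result below stores the protobuf request
+            # directly in both query_params and body)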
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = text_service.EmbedTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.embed_text(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_embed_text_rest_unset_required_fields():
+    transport = transports.TextServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.embed_text._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "model",
+                "text",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_embed_text_rest_interceptors(null_interceptor):
+    transport = transports.TextServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.TextServiceRestInterceptor(),
+    )
+    client = TextServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.TextServiceRestInterceptor, "post_embed_text"
+    ) as post, mock.patch.object(
+        transports.TextServiceRestInterceptor, "pre_embed_text"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = text_service.EmbedTextRequest.pb(text_service.EmbedTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = text_service.EmbedTextResponse.to_json(
+            text_service.EmbedTextResponse()
+        )
+
+        request = text_service.EmbedTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = text_service.EmbedTextResponse()
+
+        client.embed_text(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_embed_text_rest_bad_request(
+    transport: str = "rest", request_type=text_service.EmbedTextRequest
+):
+    client = TextServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"model": "models/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.embed_text(request) + + +def test_embed_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.EmbedTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + text="text_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.embed_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{model=models/*}:embedText" % client.transport._host, args[1] + ) + + +def test_embed_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +def test_embed_text_rest_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
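+    # (an api_key in ClientOptions is mutually exclusive with explicit
+    # credentials, mirroring the transport-instance cases above)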
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TextServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TextServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + transports.TextServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TextServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TextServiceGrpcTransport, + ) + + +def test_text_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TextServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_text_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta2.services.text_service.transports.TextServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TextServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
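+    # (concrete transports such as TextServiceGrpcTransport and
+    # TextServiceRestTransport override these; the base class only defines
+    # the abstract surface)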
+ methods = ( + "generate_text", + "embed_text", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_text_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta2.services.text_service.transports.TextServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_text_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta2.services.text_service.transports.TextServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextServiceTransport() + adc.assert_called_once() + + +def test_text_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TextServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + ], +) +def test_text_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
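+    # (ADC here means Application Default Credentials, resolved via
+    # google.auth.default)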
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + transports.TextServiceRestTransport, + ], +) +def test_text_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TextServiceGrpcTransport, grpc_helpers), + (transports.TextServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_text_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
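+    # (client_cert_source_callback is the helper defined earlier in this test
+    # module; it returns a (cert bytes, key bytes) pair that the assertions
+    # below expect to reach grpc.ssl_channel_credentials.)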
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_text_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.TextServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_service_host_no_port(transport_name):
+    client = TextServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="generativelanguage.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "generativelanguage.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://generativelanguage.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_service_host_with_port(transport_name):
+    client = TextServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="generativelanguage.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "generativelanguage.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://generativelanguage.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_text_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = TextServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = TextServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.generate_text._session
+    session2 = client2.transport.generate_text._session
+    assert session1 != session2
+    session1 = client1.transport.embed_text._session
+    session2 = client2.transport.embed_text._session
+    assert session1 != session2
+
+
+def test_text_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TextServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_text_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
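+    # (Same check as the sync gRPC test above, but against the grpc.aio-based
+    # transport.)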
+    transport = transports.TextServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport],
+)
+def test_text_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
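+# (Unlike the previous test, client_cert_source is None here, so the channel
+# is expected to fall back to ADC-provided SslCredentials for mTLS.)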
+@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = TextServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = TextServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TextServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = TextServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TextServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = TextServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TextServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = TextServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
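+    # (Each parse_* helper is the inverse of the corresponding *_path builder,
+    # recovering the resource segments from the formatted string.)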
+ actual = TextServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = TextServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = TextServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TextServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = TextServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TextServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TextServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TextServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
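+        # (Entering and leaving the client as a context manager should close
+        # the underlying transport for every transport type.)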
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (TextServiceClient, transports.TextServiceGrpcTransport),
+        (TextServiceAsyncClient, transports.TextServiceGrpcAsyncIOTransport),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/packages/google-apps-script-type/.coveragerc b/packages/google-apps-script-type/.coveragerc
index 2327ec0b2f18..5b06ae94a652 100644
--- a/packages/google-apps-script-type/.coveragerc
+++ b/packages/google-apps-script-type/.coveragerc
@@ -5,6 +5,7 @@ branch = True
 show_missing = True
 omit =
   google/apps/script/type/slides/__init__.py
+  google/apps/script/type/slides/gapic_version.py
 exclude_lines =
   # Re-enable the standard pragma
   pragma: NO COVER
diff --git a/packages/google-apps-script-type/.repo-metadata.json b/packages/google-apps-script-type/.repo-metadata.json
index b51f5a422171..fab06f5109fb 100644
--- a/packages/google-apps-script-type/.repo-metadata.json
+++ b/packages/google-apps-script-type/.repo-metadata.json
@@ -3,7 +3,7 @@
   "name_pretty": "Google Apps Script Type Protos",
   "api_description": "",
   "product_documentation": "https://developers.google.com/apps-script/",
-  "client_documentation": "https://cloud.google.com/python/docs/reference/type/latest",
+  "client_documentation": "https://googleapis.dev/python/type/latest",
   "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues",
   "release_level": "preview",
   "language": "python",
diff --git a/packages/google-apps-script-type/CHANGELOG.md b/packages/google-apps-script-type/CHANGELOG.md
index 8ef5e42c7bc7..1bdf8d1bd68b 100644
--- a/packages/google-apps-script-type/CHANGELOG.md
+++ b/packages/google-apps-script-type/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## [0.3.2](https://github.com/googleapis/google-cloud-python/compare/google-apps-script-type-v0.3.1...google-apps-script-type-v0.3.2) (2023-06-03)
+
+
+### Documentation
+
+* fix broken client library documentation links ([#11192](https://github.com/googleapis/google-cloud-python/issues/11192)) ([5e17f7a](https://github.com/googleapis/google-cloud-python/commit/5e17f7a901bbbae8ff9a44ed62f1abd2386da2c8))
+
 ## [0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-apps-script-type-v0.3.0...google-apps-script-type-v0.3.1) (2023-01-20)
 
diff --git a/packages/google-apps-script-type/README.rst b/packages/google-apps-script-type/README.rst
index 65442e4f4eca..bc32090e8cb7 100644
--- a/packages/google-apps-script-type/README.rst
+++ b/packages/google-apps-script-type/README.rst
@@ -15,7 +15,7 @@ Python Client for Google Apps Script Type Protos
 .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-apps-script-type.svg :target: https://pypi.org/project/google-apps-script-type/ .. _Google Apps Script Type Protos: https://developers.google.com/apps-script/ -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/type/latest +.. _Client Library Documentation: https://googleapis.dev/python/type/latest .. _Product Documentation: https://developers.google.com/apps-script/ Quick Start diff --git a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py index aa80f74315ce..553d98052667 100644 --- a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/calendar/types/calendar_addon_manifest.py b/packages/google-apps-script-type/google/apps/script/type/calendar/types/calendar_addon_manifest.py index c9eabb171603..6e4368a45a88 100644 --- a/packages/google-apps-script-type/google/apps/script/type/calendar/types/calendar_addon_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/calendar/types/calendar_addon_manifest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py index aa80f74315ce..553d98052667 100644 --- a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/docs/types/docs_addon_manifest.py b/packages/google-apps-script-type/google/apps/script/type/docs/types/docs_addon_manifest.py index 8455035c928b..8e97df034ec8 100644 --- a/packages/google-apps-script-type/google/apps/script/type/docs/types/docs_addon_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/docs/types/docs_addon_manifest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py index aa80f74315ce..553d98052667 100644 --- a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/drive/types/drive_addon_manifest.py b/packages/google-apps-script-type/google/apps/script/type/drive/types/drive_addon_manifest.py index 5fa074debf1f..fcc5ff1a50d7 100644 --- a/packages/google-apps-script-type/google/apps/script/type/drive/types/drive_addon_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/drive/types/drive_addon_manifest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py index 056efd287f1d..553d98052667 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.3.1" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py index aa80f74315ce..553d98052667 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gmail/types/gmail_addon_manifest.py b/packages/google-apps-script-type/google/apps/script/type/gmail/types/gmail_addon_manifest.py index 68c5654f2a88..10fe6912c51b 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gmail/types/gmail_addon_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/gmail/types/gmail_addon_manifest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py index aa80f74315ce..553d98052667 100644 --- a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/sheets/types/sheets_addon_manifest.py b/packages/google-apps-script-type/google/apps/script/type/sheets/types/sheets_addon_manifest.py index 69c07c836c16..3f6aff5ef3ac 100644 --- a/packages/google-apps-script-type/google/apps/script/type/sheets/types/sheets_addon_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/sheets/types/sheets_addon_manifest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py index aa80f74315ce..553d98052667 100644 --- a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/slides/types/slides_addon_manifest.py b/packages/google-apps-script-type/google/apps/script/type/slides/types/slides_addon_manifest.py index 99e770417556..3a6856d3d406 100644 --- a/packages/google-apps-script-type/google/apps/script/type/slides/types/slides_addon_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/slides/types/slides_addon_manifest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/types/addon_widget_set.py b/packages/google-apps-script-type/google/apps/script/type/types/addon_widget_set.py index fe86d1550e87..3c0e85b10c23 100644 --- a/packages/google-apps-script-type/google/apps/script/type/types/addon_widget_set.py +++ b/packages/google-apps-script-type/google/apps/script/type/types/addon_widget_set.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/types/extension_point.py b/packages/google-apps-script-type/google/apps/script/type/types/extension_point.py index 08039c312026..5669a075bbc0 100644 --- a/packages/google-apps-script-type/google/apps/script/type/types/extension_point.py +++ b/packages/google-apps-script-type/google/apps/script/type/types/extension_point.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import wrappers_pb2 # type: ignore diff --git a/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py b/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py index 2f6b3080134d..c0ed3cb2c44e 100644 --- a/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import struct_pb2 # type: ignore diff --git a/packages/google-apps-script-type/noxfile.py b/packages/google-apps-script-type/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-apps-script-type/noxfile.py +++ b/packages/google-apps-script-type/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-apps-script-type/setup.py b/packages/google-apps-script-type/setup.py index d0fe87c9e48f..a866e7ea8de1 100644 --- a/packages/google-apps-script-type/setup.py +++ b/packages/google-apps-script-type/setup.py @@ -55,9 +55,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.apps", "google.apps.script"] setuptools.setup( name=name, diff --git a/packages/google-area120-tables/.OwlBot.yaml b/packages/google-area120-tables/.OwlBot.yaml new file mode 100644 index 000000000000..e70e8a9b08cb --- /dev/null +++ b/packages/google-area120-tables/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/area120/tables/(v.*)/.*-py + dest: /owl-bot-staging/google-area120-tables/$1 + +begin-after-commit-hash: ee56c3493ec6aeb237ff515ecea949710944a20f + diff --git a/packages/google-area120-tables/.coveragerc b/packages/google-area120-tables/.coveragerc new file mode 100644 index 000000000000..fc368d4a4309 --- /dev/null +++ b/packages/google-area120-tables/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/area120/tables/__init__.py + google/area120/tables/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-area120-tables/.flake8 b/packages/google-area120-tables/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-area120-tables/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-area120-tables/.gitignore b/packages/google-area120-tables/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-area120-tables/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-area120-tables/.repo-metadata.json b/packages/google-area120-tables/.repo-metadata.json new file mode 100644 index 000000000000..7bfe31495402 --- /dev/null +++ b/packages/google-area120-tables/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "area120tables", + "name_pretty": "Area 120 Tables", + "product_documentation": "https://area120.google.com", + "client_documentation": "https://googleapis.dev/python/area120tables/latest", + "issue_tracker": "", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-area120-tables", + "api_id": "area120tables.googleapis.com", + "default_version": "v1alpha1", + "codeowner_team": "", + "api_shortname": "area120tables", + "api_description": "provides programmatic methods to the Area 120 Tables API." +} diff --git a/packages/google-area120-tables/CHANGELOG.md b/packages/google-area120-tables/CHANGELOG.md new file mode 100644 index 000000000000..a32c9471f453 --- /dev/null +++ b/packages/google-area120-tables/CHANGELOG.md @@ -0,0 +1,265 @@ +# Changelog + +## [0.11.2](https://github.com/googleapis/python-area120-tables/compare/v0.11.1...v0.11.2) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#230](https://github.com/googleapis/python-area120-tables/issues/230)) ([2ad5d77](https://github.com/googleapis/python-area120-tables/commit/2ad5d77309bc8e7eb63af60b796d85e1306c2278)) + +## [0.11.1](https://github.com/googleapis/python-area120-tables/compare/v0.11.0...v0.11.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([a9d18ca](https://github.com/googleapis/python-area120-tables/commit/a9d18cac29a6607520ba25e6d8ad2ad862f4710c)) + + +### Documentation + +* Add documentation for enums ([a9d18ca](https://github.com/googleapis/python-area120-tables/commit/a9d18cac29a6607520ba25e6d8ad2ad862f4710c)) + +## [0.11.0](https://github.com/googleapis/python-area120-tables/compare/v0.10.1...v0.11.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#219](https://github.com/googleapis/python-area120-tables/issues/219)) ([332bff3](https://github.com/googleapis/python-area120-tables/commit/332bff3f50052a41fcc44d7c6ee56652540a60ba)) + +## [0.10.1](https://github.com/googleapis/python-area120-tables/compare/v0.10.0...v0.10.1) (2022-12-14) + + +### Bug Fixes + +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([4392d33](https://github.com/googleapis/python-area120-tables/commit/4392d33bf965c7a8db803d0fac9af4df02391a97)) +* Drop usage of pkg_resources ([4392d33](https://github.com/googleapis/python-area120-tables/commit/4392d33bf965c7a8db803d0fac9af4df02391a97)) +* Fix timeout default values ([4392d33](https://github.com/googleapis/python-area120-tables/commit/4392d33bf965c7a8db803d0fac9af4df02391a97)) + + +### Documentation + +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([4392d33](https://github.com/googleapis/python-area120-tables/commit/4392d33bf965c7a8db803d0fac9af4df02391a97)) + +## [0.10.0](https://github.com/googleapis/python-area120-tables/compare/v0.9.0...v0.10.0) (2022-11-16) + + +### Features + +* Add typing to proto.Message based class attributes ([dddcfad](https://github.com/googleapis/python-area120-tables/commit/dddcfade3932b8aa9f303a299200175e4f8547b1)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field 
([dddcfad](https://github.com/googleapis/python-area120-tables/commit/dddcfade3932b8aa9f303a299200175e4f8547b1)) + +## [0.9.0](https://github.com/googleapis/python-area120-tables/compare/v0.8.2...v0.9.0) (2022-11-08) + + +### Features + +* add support for `google.area120.tables.__version__` ([fc87e8b](https://github.com/googleapis/python-area120-tables/commit/fc87e8bb6bd449ad5fe498b361c53f6e0cf51dc4)) + + +### Bug Fixes + +* Add dict typing for client_options ([fc87e8b](https://github.com/googleapis/python-area120-tables/commit/fc87e8bb6bd449ad5fe498b361c53f6e0cf51dc4)) + +## [0.8.2](https://github.com/googleapis/python-area120-tables/compare/v0.8.1...v0.8.2) (2022-10-10) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#201](https://github.com/googleapis/python-area120-tables/issues/201)) ([9bf9172](https://github.com/googleapis/python-area120-tables/commit/9bf91729e41f11b3e2979690f768e708a8ee936a)) +* **deps:** require google-api-core>=1.33.2 ([9bf9172](https://github.com/googleapis/python-area120-tables/commit/9bf91729e41f11b3e2979690f768e708a8ee936a)) + +## [0.8.1](https://github.com/googleapis/python-area120-tables/compare/v0.8.0...v0.8.1) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#198](https://github.com/googleapis/python-area120-tables/issues/198)) ([5813086](https://github.com/googleapis/python-area120-tables/commit/58130869fcf96d129dd0a72cada69df9ad868e19)) + +## [0.8.0](https://github.com/googleapis/python-area120-tables/compare/v0.7.1...v0.8.0) (2022-09-16) + + +### Features + +* Add support for REST transport ([#194](https://github.com/googleapis/python-area120-tables/issues/194)) ([14bab6c](https://github.com/googleapis/python-area120-tables/commit/14bab6c92a74aa27b2dac80543171082a7c4c189)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.33.1,>=2.8.0 ([14bab6c](https://github.com/googleapis/python-area120-tables/commit/14bab6c92a74aa27b2dac80543171082a7c4c189)) +* **deps:** require protobuf >= 3.20.1 ([14bab6c](https://github.com/googleapis/python-area120-tables/commit/14bab6c92a74aa27b2dac80543171082a7c4c189)) + +## [0.7.1](https://github.com/googleapis/python-area120-tables/compare/v0.7.0...v0.7.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#181](https://github.com/googleapis/python-area120-tables/issues/181)) ([6c45184](https://github.com/googleapis/python-area120-tables/commit/6c451840d2e6169c108ad6e455289e9d81c6c317)) +* **deps:** require proto-plus >= 1.22.0 ([6c45184](https://github.com/googleapis/python-area120-tables/commit/6c451840d2e6169c108ad6e455289e9d81c6c317)) + +## [0.7.0](https://github.com/googleapis/python-area120-tables/compare/v0.6.2...v0.7.0) (2022-07-18) + + +### Features + +* add audience parameter ([64ffe58](https://github.com/googleapis/python-area120-tables/commit/64ffe58e32298fd0a9305bfe0f142374062e39e2)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#173](https://github.com/googleapis/python-area120-tables/issues/173)) ([64ffe58](https://github.com/googleapis/python-area120-tables/commit/64ffe58e32298fd0a9305bfe0f142374062e39e2)) +* require python 3.7+ ([#175](https://github.com/googleapis/python-area120-tables/issues/175)) ([0cab642](https://github.com/googleapis/python-area120-tables/commit/0cab642e514072540128d3df941281ccef2e776c)) + +## [0.6.2](https://github.com/googleapis/python-area120-tables/compare/v0.6.1...v0.6.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev 
([#165](https://github.com/googleapis/python-area120-tables/issues/165)) ([4475608](https://github.com/googleapis/python-area120-tables/commit/44756082f10cf31ed691d5345216de396efae88c)) + + +### Documentation + +* fix changelog header to consistent size ([#166](https://github.com/googleapis/python-area120-tables/issues/166)) ([bb33737](https://github.com/googleapis/python-area120-tables/commit/bb3373758f17d092bd65510107cd162d24e696fe)) + +## [0.6.1](https://github.com/googleapis/python-area120-tables/compare/v0.6.0...v0.6.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#130](https://github.com/googleapis/python-area120-tables/issues/130)) ([0e565da](https://github.com/googleapis/python-area120-tables/commit/0e565da03fe9a5f32c9e08435e0674ca4ef0e0ae)) +* **deps:** require proto-plus>=1.15.0 ([0e565da](https://github.com/googleapis/python-area120-tables/commit/0e565da03fe9a5f32c9e08435e0674ca4ef0e0ae)) + +## [0.6.0](https://github.com/googleapis/python-area120-tables/compare/v0.5.1...v0.6.0) (2022-02-26) + + +### Features + +* add api key support ([#116](https://github.com/googleapis/python-area120-tables/issues/116)) ([48c0ead](https://github.com/googleapis/python-area120-tables/commit/48c0ead49612fc58ea39ed55745abd73a83f3732)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([6474a00](https://github.com/googleapis/python-area120-tables/commit/6474a00180d134ab4c2d1d0a2328531758ae140d)) + + +### Documentation + +* add autogenerated code snippets ([ae17805](https://github.com/googleapis/python-area120-tables/commit/ae17805b6ea2307947129caba2185ade52a5e74c)) + +## [0.5.1](https://www.github.com/googleapis/python-area120-tables/compare/v0.5.0...v0.5.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([76b9cba](https://www.github.com/googleapis/python-area120-tables/commit/76b9cba55f3ba56dd015591820c34f59b68c796d)) +* **deps:** require google-api-core >= 1.28.0 ([76b9cba](https://www.github.com/googleapis/python-area120-tables/commit/76b9cba55f3ba56dd015591820c34f59b68c796d)) + + +### Documentation + +* list oneofs in docstring ([76b9cba](https://www.github.com/googleapis/python-area120-tables/commit/76b9cba55f3ba56dd015591820c34f59b68c796d)) + +## [0.5.0](https://www.github.com/googleapis/python-area120-tables/compare/v0.4.4...v0.5.0) (2021-10-11) + + +### Features + +* add context manager support in client ([#96](https://www.github.com/googleapis/python-area120-tables/issues/96)) ([8990f1b](https://www.github.com/googleapis/python-area120-tables/commit/8990f1b4539f8bf0d3a327c4327a12d486447cbf)) +* add trove classifier for python 3.9 and python 3.10 ([#99](https://www.github.com/googleapis/python-area120-tables/issues/99)) ([c9cee0f](https://www.github.com/googleapis/python-area120-tables/commit/c9cee0f8cb1bf6ddb4b7bcedfd88494e9cedcf32)) + +## [0.4.4](https://www.github.com/googleapis/python-area120-tables/compare/v0.4.3...v0.4.4) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([18767c6](https://www.github.com/googleapis/python-area120-tables/commit/18767c6fe872942866266c0530a95a8d5160b8e5)) + +## [0.4.3](https://www.github.com/googleapis/python-area120-tables/compare/v0.4.2...v0.4.3) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([ee57f81](https://www.github.com/googleapis/python-area120-tables/commit/ee57f81534ac3cc21ced61659ee1796dbe5b4210)) + +## 
[0.4.2](https://www.github.com/googleapis/python-area120-tables/compare/v0.4.1...v0.4.2) (2021-07-26) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#69](https://www.github.com/googleapis/python-area120-tables/issues/69)) ([954f6c7](https://www.github.com/googleapis/python-area120-tables/commit/954f6c7ef550d502cb75edb1f981de8eb67849b1)) +* enable self signed jwt for grpc ([#75](https://www.github.com/googleapis/python-area120-tables/issues/75)) ([75f29fc](https://www.github.com/googleapis/python-area120-tables/commit/75f29fc84173a1c9497d261fc74d71cb068a41af)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#70](https://www.github.com/googleapis/python-area120-tables/issues/70)) ([15716f3](https://www.github.com/googleapis/python-area120-tables/commit/15716f32094df082a6536513699e68ab308aaf17)) + + +### Miscellaneous Chores + +* release as 0.4.2 ([#74](https://www.github.com/googleapis/python-area120-tables/issues/74)) ([682d5dd](https://www.github.com/googleapis/python-area120-tables/commit/682d5dd57c6f752595401bd2d5d232f875bc163b)) + +## [0.4.1](https://www.github.com/googleapis/python-area120-tables/compare/v0.4.0...v0.4.1) (2021-06-30) + + +### Bug Fixes + +* disable always_use_jwt_access ([5c043fa](https://www.github.com/googleapis/python-area120-tables/commit/5c043fa62bd8e7cdb27a2392a72a74bb15d2e9f4)) +* disable always_use_jwt_access ([#65](https://www.github.com/googleapis/python-area120-tables/issues/65)) ([5c043fa](https://www.github.com/googleapis/python-area120-tables/commit/5c043fa62bd8e7cdb27a2392a72a74bb15d2e9f4)) + +## [0.4.0](https://www.github.com/googleapis/python-area120-tables/compare/v0.3.1...v0.4.0) (2021-06-23) + + +### Features + +* add always_use_jwt_access ([#61](https://www.github.com/googleapis/python-area120-tables/issues/61)) ([91c6f3a](https://www.github.com/googleapis/python-area120-tables/commit/91c6f3adf14a642aba2c0e18158536e9f3031f59)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-area120-tables/issues/1127)) ([#56](https://www.github.com/googleapis/python-area120-tables/issues/56)) ([4bf2bae](https://www.github.com/googleapis/python-area120-tables/commit/4bf2baea55b617393487c2ae6866f04ef073378d)), closes [#1126](https://www.github.com/googleapis/python-area120-tables/issues/1126) + +## [0.3.1](https://www.github.com/googleapis/python-area120-tables/compare/v0.3.0...v0.3.1) (2021-06-16) + + +### Bug Fixes + +* exclude docs and tests from package ([#52](https://www.github.com/googleapis/python-area120-tables/issues/52)) ([da319aa](https://www.github.com/googleapis/python-area120-tables/commit/da319aa3611e82602a4085a7b4472b2ad074b0a7)) + +## [0.3.0](https://www.github.com/googleapis/python-area120-tables/compare/v0.2.0...v0.3.0) (2021-05-28) + + +### Features + +* Added ListWorkspaces, GetWorkspace, BatchDeleteRows APIs ([#25](https://www.github.com/googleapis/python-area120-tables/issues/25)) ([99c6918](https://www.github.com/googleapis/python-area120-tables/commit/99c691819824ab6cc1915ba23867a5051b94b8a2)) +* support self-signed JWT flow for service accounts ([fe5836e](https://www.github.com/googleapis/python-area120-tables/commit/fe5836e050cc3f548c9a98af73d01466e23e8404)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([fe5836e](https://www.github.com/googleapis/python-area120-tables/commit/fe5836e050cc3f548c9a98af73d01466e23e8404)) +* **deps:** add packaging requirement 
([#47](https://www.github.com/googleapis/python-area120-tables/issues/47)) ([c7cee0b](https://www.github.com/googleapis/python-area120-tables/commit/c7cee0bd0252560e603f1456bdc577b26d477c87)) + +## [0.2.0](https://www.github.com/googleapis/python-area120-tables/compare/v0.1.0...v0.2.0) (2021-01-29) + + +### Features + +* add common resource paths, expose client transport ([#4](https://www.github.com/googleapis/python-area120-tables/issues/4)) ([e2367d0](https://www.github.com/googleapis/python-area120-tables/commit/e2367d0be19e5e8e353ad9757a5b2ba730168b4c)) + + +### Bug Fixes + +* remove client recv msg limit and add enums to `types/__init__.py` ([#13](https://www.github.com/googleapis/python-area120-tables/issues/13)) ([302c071](https://www.github.com/googleapis/python-area120-tables/commit/302c071a8493f24b85938b48b66ff6eb83203eda)) + +## 0.1.0 (2020-09-14) + + +### Features + +* add v1alpha1 ([fc837f9](https://www.github.com/googleapis/python-area120-tables/commit/fc837f99e01228db2cb844376e2f27be6bff3cd6)) +* generate v1alpha1 ([7c9b3c5](https://www.github.com/googleapis/python-area120-tables/commit/7c9b3c552c3e99934c71bc64f6f6421343470875)) diff --git a/packages/google-area120-tables/CODE_OF_CONDUCT.md b/packages/google-area120-tables/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-area120-tables/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-area120-tables/CONTRIBUTING.rst b/packages/google-area120-tables/CONTRIBUTING.rst new file mode 100644 index 000000000000..baf031bb7cac --- /dev/null +++ b/packages/google-area120-tables/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+  documentation.
+
+- The feature must work fully on the following CPython versions:
+  3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. 
If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.11 -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python 3. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via:: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` folder. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-area120-tables + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. 
_Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-area120-tables/LICENSE b/packages/google-area120-tables/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-area120-tables/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship.
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-area120-tables/MANIFEST.in b/packages/google-area120-tables/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-area120-tables/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-area120-tables/README.rst b/packages/google-area120-tables/README.rst new file mode 100644 index 000000000000..e8a69d6aad5d --- /dev/null +++ b/packages/google-area120-tables/README.rst @@ -0,0 +1,107 @@ +Python Client for Area 120 Tables +================================= + +|preview| |pypi| |versions| + +`Area 120 Tables`_: provides programmatic methods to the Area 120 Tables API. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-area120-tables.svg + :target: https://pypi.org/project/google-area120-tables/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-area120-tables.svg + :target: https://pypi.org/project/google-area120-tables/ +.. _Area 120 Tables: https://area120.google.com +.. _Client Library Documentation: https://googleapis.dev/python/area120tables/latest +.. _Product Documentation: https://area120.google.com + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Area 120 Tables.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Area 120 Tables.: https://area120.google.com +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-area120-tables + + +Windows +^^^^^^^ + +.. 
code-block:: console + + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-area120-tables + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Area 120 Tables + to see other available methods on the client. +- Read the `Area 120 Tables Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Area 120 Tables Product documentation: https://area120.google.com +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-area120-tables/SECURITY.md b/packages/google-area120-tables/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-area120-tables/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. diff --git a/packages/google-area120-tables/docs/CHANGELOG.md b/packages/google-area120-tables/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-area120-tables/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-area120-tables/docs/README.rst b/packages/google-area120-tables/docs/README.rst new file mode 100644 index 000000000000..e8b5b0f0f183 --- /dev/null +++ b/packages/google-area120-tables/docs/README.rst @@ -0,0 +1,82 @@ +Python Client for Area 120 Tables +================================================= + +|alpha| |pypi| |versions| + +`Area 120 Tables API`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |alpha| image:: https://img.shields.io/badge/support-alpha-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#alpha-support +.. |pypi| image:: https://img.shields.io/pypi/v/google-area120-tables.svg + :target: https://pypi.org/project/google-area120-tables/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-area120-tables.svg + :target: https://pypi.org/project/google-area120-tables/ +.. _Area 120 Tables API: https://area120.google.com +.. _Client Library Documentation: https://googleapis.dev/python/area120tables/latest +.. _Product Documentation: https://area120.google.com + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Area 120 Tables API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Area 120 Tables API.: https://area120.google.com +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions.
+ +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-area120-tables + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-area120-tables + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for the Area 120 Tables API + to see other available methods on the client. +- Read the `Area 120 Tables API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Area 120 Tables API Product documentation: https://area120.google.com +.. _README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/packages/google-area120-tables/docs/_static/custom.css b/packages/google-area120-tables/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-area120-tables/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-area120-tables/docs/_templates/layout.html b/packages/google-area120-tables/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-area120-tables/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +  <div class="document">
+    {{ sidebar() }} +    {%- block document %} +      <div class="documentwrapper">
+      {%- if render_sidebar %} +        <div class="bodywrapper">
+        {%- endif %} + +          {%- block relbar_top %} +          {%- if theme_show_relbar_top|tobool %} +            <div class="related top"> +              &nbsp; +              {{- rellink_markup () }} +            </div> +          {%- endif %} +          {% endblock %} + +
+          <div class="body" role="main"> +            <div class="admonition" id="python2-eol">
+             As of January 1, 2020 this library no longer supports Python 2 on the latest released version. +             Library versions released prior to that date will continue to be available. For more information please +             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. +            </div>
+            {% block body %} {% endblock %} +          </div>
+ +          {%- block relbar_bottom %} +          {%- if theme_show_relbar_bottom|tobool %} +            <div class="related bottom"> +              &nbsp; +              {{- rellink_markup () }} +            </div> +          {%- endif %} +          {% endblock %} + +          {%- if render_sidebar %} +        </div>
+        {%- endif %} +      </div>
+    {%- endblock %} +    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-area120-tables/docs/conf.py b/packages/google-area120-tables/docs/conf.py new file mode 100644 index 000000000000..744732ff148c --- /dev/null +++ b/packages/google-area120-tables/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-area120-tables documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-area120-tables" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-area120-tables", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-area120-tables-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-area120-tables.tex", + "google-area120-tables Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-area120-tables", + "google-area120-tables Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-area120-tables", + "google-area120-tables Documentation", + author, + "google-area120-tables", + "google-area120-tables Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-area120-tables/docs/index.rst b/packages/google-area120-tables/docs/index.rst new file mode 100644 index 000000000000..570535dab44c --- /dev/null +++ b/packages/google-area120-tables/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + tables_v1alpha1/services + tables_v1alpha1/types + + +Changelog +--------- + +For a list of all ``google-area120-tables`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-area120-tables/docs/multiprocessing.rst b/packages/google-area120-tables/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-area120-tables/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`.
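As a minimal sketch of the guidance in the note above (not part of the diff itself; it assumes Application Default Credentials are configured and that the caller may list tables, and the ``count_tables`` helper is purely illustrative), each worker process constructs its own client after the fork instead of inheriting one from the parent:

.. code-block:: python

    import multiprocessing

    from google.area120 import tables_v1alpha1


    def count_tables(_: int) -> int:
        # Construct the client *inside* the worker, i.e. after the fork,
        # so each process owns its own gRPC channel.
        client = tables_v1alpha1.TablesServiceClient()
        return sum(1 for _ in client.list_tables())


    if __name__ == "__main__":
        # Two pool workers, each building its own client; no client
        # instance ever crosses the fork boundary.
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(count_tables, range(2)))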
diff --git a/packages/google-area120-tables/docs/tables_v1alpha1/services.rst b/packages/google-area120-tables/docs/tables_v1alpha1/services.rst new file mode 100644 index 000000000000..286fd4691beb --- /dev/null +++ b/packages/google-area120-tables/docs/tables_v1alpha1/services.rst @@ -0,0 +1,6 @@ +Services for Google Area120 Tables v1alpha1 API +=============================================== +.. toctree:: + :maxdepth: 2 + + tables_service diff --git a/packages/google-area120-tables/docs/tables_v1alpha1/tables_service.rst b/packages/google-area120-tables/docs/tables_v1alpha1/tables_service.rst new file mode 100644 index 000000000000..766388ee81c9 --- /dev/null +++ b/packages/google-area120-tables/docs/tables_v1alpha1/tables_service.rst @@ -0,0 +1,10 @@ +TablesService +------------------------------- + +.. automodule:: google.area120.tables_v1alpha1.services.tables_service + :members: + :inherited-members: + +.. automodule:: google.area120.tables_v1alpha1.services.tables_service.pagers + :members: + :inherited-members: diff --git a/packages/google-area120-tables/docs/tables_v1alpha1/types.rst b/packages/google-area120-tables/docs/tables_v1alpha1/types.rst new file mode 100644 index 000000000000..9c77b6c0de0d --- /dev/null +++ b/packages/google-area120-tables/docs/tables_v1alpha1/types.rst @@ -0,0 +1,6 @@ +Types for Google Area120 Tables v1alpha1 API +============================================ + +.. automodule:: google.area120.tables_v1alpha1.types + :members: + :show-inheritance: diff --git a/packages/google-area120-tables/google/area120/tables/__init__.py b/packages/google-area120-tables/google/area120/tables/__init__.py new file mode 100644 index 000000000000..bc562d3cf183 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.area120.tables import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.area120.tables_v1alpha1.services.tables_service.async_client import ( + TablesServiceAsyncClient, +) +from google.area120.tables_v1alpha1.services.tables_service.client import ( + TablesServiceClient, +) +from google.area120.tables_v1alpha1.types.tables import ( + BatchCreateRowsRequest, + BatchCreateRowsResponse, + BatchDeleteRowsRequest, + BatchUpdateRowsRequest, + BatchUpdateRowsResponse, + ColumnDescription, + CreateRowRequest, + DeleteRowRequest, + GetRowRequest, + GetTableRequest, + GetWorkspaceRequest, + LabeledItem, + ListRowsRequest, + ListRowsResponse, + ListTablesRequest, + ListTablesResponse, + ListWorkspacesRequest, + ListWorkspacesResponse, + LookupDetails, + RelationshipDetails, + Row, + Table, + UpdateRowRequest, + View, + Workspace, +) + +__all__ = ( + "TablesServiceClient", + "TablesServiceAsyncClient", + "BatchCreateRowsRequest", + "BatchCreateRowsResponse", + "BatchDeleteRowsRequest", + "BatchUpdateRowsRequest", + "BatchUpdateRowsResponse", + "ColumnDescription", + "CreateRowRequest", + "DeleteRowRequest", + "GetRowRequest", + "GetTableRequest", + "GetWorkspaceRequest", + "LabeledItem", + "ListRowsRequest", + "ListRowsResponse", + "ListTablesRequest", + "ListTablesResponse", + "ListWorkspacesRequest", + "ListWorkspacesResponse", + "LookupDetails", + "RelationshipDetails", + "Row", + "Table", + "UpdateRowRequest", + "Workspace", + "View", +) diff --git a/packages/google-area120-tables/google/area120/tables/gapic_version.py b/packages/google-area120-tables/google/area120/tables/gapic_version.py new file mode 100644 index 000000000000..d8ae9e009f6a --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.11.2" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables/py.typed b/packages/google-area120-tables/google/area120/tables/py.typed new file mode 100644 index 000000000000..79b3d533062a --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-area120-tables package uses inline types. diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/__init__.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/__init__.py new file mode 100644 index 000000000000..8a21501a657d --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/__init__.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.area120.tables_v1alpha1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.tables_service import TablesServiceAsyncClient, TablesServiceClient +from .types.tables import ( + BatchCreateRowsRequest, + BatchCreateRowsResponse, + BatchDeleteRowsRequest, + BatchUpdateRowsRequest, + BatchUpdateRowsResponse, + ColumnDescription, + CreateRowRequest, + DeleteRowRequest, + GetRowRequest, + GetTableRequest, + GetWorkspaceRequest, + LabeledItem, + ListRowsRequest, + ListRowsResponse, + ListTablesRequest, + ListTablesResponse, + ListWorkspacesRequest, + ListWorkspacesResponse, + LookupDetails, + RelationshipDetails, + Row, + Table, + UpdateRowRequest, + View, + Workspace, +) + +__all__ = ( + "TablesServiceAsyncClient", + "BatchCreateRowsRequest", + "BatchCreateRowsResponse", + "BatchDeleteRowsRequest", + "BatchUpdateRowsRequest", + "BatchUpdateRowsResponse", + "ColumnDescription", + "CreateRowRequest", + "DeleteRowRequest", + "GetRowRequest", + "GetTableRequest", + "GetWorkspaceRequest", + "LabeledItem", + "ListRowsRequest", + "ListRowsResponse", + "ListTablesRequest", + "ListTablesResponse", + "ListWorkspacesRequest", + "ListWorkspacesResponse", + "LookupDetails", + "RelationshipDetails", + "Row", + "Table", + "TablesServiceClient", + "UpdateRowRequest", + "View", + "Workspace", +) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_metadata.json b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_metadata.json new file mode 100644 index 000000000000..c59c4471f838 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_metadata.json @@ -0,0 +1,208 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.area120.tables_v1alpha1", + "protoPackage": "google.area120.tables.v1alpha1", + "schema": "1.0", + "services": { + "TablesService": { + "clients": { + "grpc": { + "libraryClient": "TablesServiceClient", + "rpcs": { + "BatchCreateRows": { + "methods": [ + "batch_create_rows" + ] + }, + "BatchDeleteRows": { + "methods": [ + "batch_delete_rows" + ] + }, + "BatchUpdateRows": { + "methods": [ + "batch_update_rows" + ] + }, + "CreateRow": { + "methods": [ + "create_row" + ] + }, + "DeleteRow": { + "methods": [ + "delete_row" + ] + }, + "GetRow": { + "methods": [ + "get_row" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "GetWorkspace": { + "methods": [ + "get_workspace" + ] + }, + "ListRows": { + "methods": [ + "list_rows" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "ListWorkspaces": { + "methods": [ + "list_workspaces" + ] + }, + "UpdateRow": { + "methods": [ + "update_row" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TablesServiceAsyncClient", + "rpcs": { + "BatchCreateRows": { + "methods": [ + "batch_create_rows" + ] + }, + "BatchDeleteRows": { + "methods": [ + "batch_delete_rows" + ] + }, + "BatchUpdateRows": { + "methods": [ + "batch_update_rows" + ] + }, + "CreateRow": { + "methods": [ + 
"create_row" + ] + }, + "DeleteRow": { + "methods": [ + "delete_row" + ] + }, + "GetRow": { + "methods": [ + "get_row" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "GetWorkspace": { + "methods": [ + "get_workspace" + ] + }, + "ListRows": { + "methods": [ + "list_rows" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "ListWorkspaces": { + "methods": [ + "list_workspaces" + ] + }, + "UpdateRow": { + "methods": [ + "update_row" + ] + } + } + }, + "rest": { + "libraryClient": "TablesServiceClient", + "rpcs": { + "BatchCreateRows": { + "methods": [ + "batch_create_rows" + ] + }, + "BatchDeleteRows": { + "methods": [ + "batch_delete_rows" + ] + }, + "BatchUpdateRows": { + "methods": [ + "batch_update_rows" + ] + }, + "CreateRow": { + "methods": [ + "create_row" + ] + }, + "DeleteRow": { + "methods": [ + "delete_row" + ] + }, + "GetRow": { + "methods": [ + "get_row" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "GetWorkspace": { + "methods": [ + "get_workspace" + ] + }, + "ListRows": { + "methods": [ + "list_rows" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "ListWorkspaces": { + "methods": [ + "list_workspaces" + ] + }, + "UpdateRow": { + "methods": [ + "update_row" + ] + } + } + } + } + } + } +} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py new file mode 100644 index 000000000000..d8ae9e009f6a --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.11.2" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/py.typed b/packages/google-area120-tables/google/area120/tables_v1alpha1/py.typed new file mode 100644 index 000000000000..79b3d533062a --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-area120-tables package uses inline types. diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/__init__.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/__init__.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/__init__.py new file mode 100644 index 000000000000..5e13cc5e7a38 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TablesServiceAsyncClient +from .client import TablesServiceClient + +__all__ = ( + "TablesServiceClient", + "TablesServiceAsyncClient", +) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py new file mode 100644 index 000000000000..91d815e90ff5 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/async_client.py @@ -0,0 +1,1379 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.area120.tables_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.area120.tables_v1alpha1.services.tables_service import pagers +from google.area120.tables_v1alpha1.types import tables + +from .client import TablesServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, TablesServiceTransport +from .transports.grpc_asyncio import TablesServiceGrpcAsyncIOTransport + + +class TablesServiceAsyncClient: + """The Tables Service provides an API for reading and updating tables. + It defines the following resource model: + + - The API has a collection of + [Table][google.area120.tables.v1alpha1.Table] resources, named + ``tables/*`` + + - Each Table has a collection of + [Row][google.area120.tables.v1alpha1.Row] resources, named + ``tables/*/rows/*`` + + - The API has a collection of + [Workspace][google.area120.tables.v1alpha1.Workspace] resources, + named ``workspaces/*``. + """ + + _client: TablesServiceClient + + DEFAULT_ENDPOINT = TablesServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TablesServiceClient.DEFAULT_MTLS_ENDPOINT + + row_path = staticmethod(TablesServiceClient.row_path) + parse_row_path = staticmethod(TablesServiceClient.parse_row_path) + table_path = staticmethod(TablesServiceClient.table_path) + parse_table_path = staticmethod(TablesServiceClient.parse_table_path) + workspace_path = staticmethod(TablesServiceClient.workspace_path) + parse_workspace_path = staticmethod(TablesServiceClient.parse_workspace_path) + common_billing_account_path = staticmethod( + TablesServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TablesServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TablesServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + TablesServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + TablesServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + TablesServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(TablesServiceClient.common_project_path) + parse_common_project_path = staticmethod( + TablesServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(TablesServiceClient.common_location_path) + parse_common_location_path = staticmethod( + TablesServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+
+        Returns:
+            TablesServiceAsyncClient: The constructed client.
+        """
+        return TablesServiceClient.from_service_account_info.__func__(TablesServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TablesServiceAsyncClient: The constructed client.
+        """
+        return TablesServiceClient.from_service_account_file.__func__(TablesServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return TablesServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> TablesServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TablesServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(TablesServiceClient).get_transport_class, type(TablesServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, TablesServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the tables service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.TablesServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
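A minimal sketch of the `from_service_account_file` convenience constructor documented above; the key path is hypothetical:

    from google.area120 import tables_v1alpha1

    client = tables_v1alpha1.TablesServiceAsyncClient.from_service_account_file(
        "service-account-key.json"  # hypothetical path to a service account key
    )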
+ client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TablesServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_table( + self, + request: Optional[Union[tables.GetTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Table: + r"""Gets a table. Returns NOT_FOUND if the table does not exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_get_table(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = await client.get_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.GetTableRequest, dict]]): + The request object. Request message for + TablesService.GetTable. + name (:class:`str`): + Required. The name of the table to + retrieve. Format: tables/{table} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Table: + A single table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
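As a sketch of the `client_options` behavior described above, the default endpoint can be overridden explicitly. The value shown is the library default; a regional or private endpoint would differ, and Application Default Credentials are assumed to be available:

    from google.api_core.client_options import ClientOptions

    from google.area120 import tables_v1alpha1

    options = ClientOptions(api_endpoint="area120tables.googleapis.com")
    client = tables_v1alpha1.TablesServiceClient(client_options=options)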
+ ) + + request = tables.GetTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_table, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tables( + self, + request: Optional[Union[tables.ListTablesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTablesAsyncPager: + r"""Lists tables for the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_list_tables(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.ListTablesRequest( + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.ListTablesRequest, dict]]): + The request object. Request message for + TablesService.ListTables. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.services.tables_service.pagers.ListTablesAsyncPager: + Response message for + TablesService.ListTables. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = tables.ListTablesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tables, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTablesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
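The mutual-exclusion check above means a caller passes either a request object or the flattened `name` field, never both. A sketch of the two equivalent async calls (table name hypothetical, credentials assumed available):

    import asyncio

    from google.area120 import tables_v1alpha1


    async def main():
        client = tables_v1alpha1.TablesServiceAsyncClient()
        # Flattened form:
        table = await client.get_table(name="tables/my-table")
        # Request-object form, equivalent to the above:
        request = tables_v1alpha1.GetTableRequest(name="tables/my-table")
        table = await client.get_table(request=request)
        print(table.name)

    asyncio.run(main())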
+ return response + + async def get_workspace( + self, + request: Optional[Union[tables.GetWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Workspace: + r"""Gets a workspace. Returns NOT_FOUND if the workspace does not + exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_get_workspace(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.GetWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.GetWorkspaceRequest, dict]]): + The request object. Request message for + TablesService.GetWorkspace. + name (:class:`str`): + Required. The name of the workspace + to retrieve. Format: + workspaces/{workspace} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Workspace: + A single workspace. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = tables.GetWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workspaces( + self, + request: Optional[Union[tables.ListWorkspacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkspacesAsyncPager: + r"""Lists workspaces for the user. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_list_workspaces(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.ListWorkspacesRequest( + ) + + # Make the request + page_result = client.list_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.ListWorkspacesRequest, dict]]): + The request object. Request message for + TablesService.ListWorkspaces. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.services.tables_service.pagers.ListWorkspacesAsyncPager: + Response message for + TablesService.ListWorkspaces. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = tables.ListWorkspacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workspaces, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkspacesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_row( + self, + request: Optional[Union[tables.GetRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Gets a row. Returns NOT_FOUND if the row does not exist in the + table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_get_row(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.GetRowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_row(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.GetRowRequest, dict]]): + The request object. Request message for + TablesService.GetRow. + name (:class:`str`): + Required. The name of the row to + retrieve. Format: + tables/{table}/rows/{row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Row: + A single row in a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = tables.GetRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_row, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_rows( + self, + request: Optional[Union[tables.ListRowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRowsAsyncPager: + r"""Lists rows in a table. Returns NOT_FOUND if the table does not + exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_list_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.ListRowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_rows(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.ListRowsRequest, dict]]): + The request object. Request message for + TablesService.ListRows. + parent (:class:`str`): + Required. The parent table. + Format: tables/{table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.services.tables_service.pagers.ListRowsAsyncPager: + Response message for + TablesService.ListRows. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = tables.ListRowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_rows, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRowsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_row( + self, + request: Optional[Union[tables.CreateRowRequest, dict]] = None, + *, + parent: Optional[str] = None, + row: Optional[tables.Row] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Creates a row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
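The pagers returned by the list methods hide page tokens entirely. A sketch of draining `list_rows` with the synchronous client (parent name hypothetical, credentials assumed):

    from google.area120 import tables_v1alpha1

    client = tables_v1alpha1.TablesServiceClient()
    # The pager lazily fetches additional pages as iteration proceeds.
    for row in client.list_rows(parent="tables/my-table"):
        print(row.name)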
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_create_row(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.CreateRowRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_row(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.CreateRowRequest, dict]]): + The request object. Request message for + TablesService.CreateRow. + parent (:class:`str`): + Required. The parent table where this + row will be created. Format: + tables/{table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + row (:class:`google.area120.tables_v1alpha1.types.Row`): + Required. The row to create. + This corresponds to the ``row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Row: + A single row in a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, row]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = tables.CreateRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if row is not None: + request.row = row + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_row, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_create_rows( + self, + request: Optional[Union[tables.BatchCreateRowsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.BatchCreateRowsResponse: + r"""Creates multiple rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_batch_create_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + requests = tables_v1alpha1.CreateRowRequest() + requests.parent = "parent_value" + + request = tables_v1alpha1.BatchCreateRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.batch_create_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.BatchCreateRowsRequest, dict]]): + The request object. Request message for + TablesService.BatchCreateRows. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.BatchCreateRowsResponse: + Response message for + TablesService.BatchCreateRows. + + """ + # Create or coerce a protobuf request object. + request = tables.BatchCreateRowsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_rows, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_row( + self, + request: Optional[Union[tables.UpdateRowRequest, dict]] = None, + *, + row: Optional[tables.Row] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Updates a row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_update_row(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.UpdateRowRequest( + ) + + # Make the request + response = await client.update_row(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.UpdateRowRequest, dict]]): + The request object. Request message for + TablesService.UpdateRow. + row (:class:`google.area120.tables_v1alpha1.types.Row`): + Required. The row to update. + This corresponds to the ``row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
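A sketch of building the batched request used by `batch_create_rows` above: the outer request carries the parent table, and each nested `CreateRowRequest` repeats it (table name hypothetical; the `Row` payloads are left empty here for brevity):

    from google.area120 import tables_v1alpha1

    client = tables_v1alpha1.TablesServiceClient()
    parent = "tables/my-table"  # hypothetical table
    request = tables_v1alpha1.BatchCreateRowsRequest(
        parent=parent,
        requests=[
            tables_v1alpha1.CreateRowRequest(parent=parent, row=tables_v1alpha1.Row()),
            tables_v1alpha1.CreateRowRequest(parent=parent, row=tables_v1alpha1.Row()),
        ],
    )
    response = client.batch_create_rows(request=request)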
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Row: + A single row in a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([row, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = tables.UpdateRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if row is not None: + request.row = row + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_row, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("row.name", request.row.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_update_rows( + self, + request: Optional[Union[tables.BatchUpdateRowsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.BatchUpdateRowsResponse: + r"""Updates multiple rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_batch_update_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.BatchUpdateRowsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_update_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.BatchUpdateRowsRequest, dict]]): + The request object. Request message for + TablesService.BatchUpdateRows. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
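A sketch of the flattened `update_row` form documented above, pairing the row with a `FieldMask` that limits which fields are written; the row name and mask path are illustrative, not taken from this diff:

    from google.protobuf import field_mask_pb2

    from google.area120 import tables_v1alpha1

    client = tables_v1alpha1.TablesServiceClient()
    row = tables_v1alpha1.Row(name="tables/my-table/rows/row-123")
    mask = field_mask_pb2.FieldMask(paths=["values"])  # illustrative mask path
    updated = client.update_row(row=row, update_mask=mask)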
+ + Returns: + google.area120.tables_v1alpha1.types.BatchUpdateRowsResponse: + Response message for + TablesService.BatchUpdateRows. + + """ + # Create or coerce a protobuf request object. + request = tables.BatchUpdateRowsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_update_rows, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_row( + self, + request: Optional[Union[tables.DeleteRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_delete_row(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.DeleteRowRequest( + name="name_value", + ) + + # Make the request + await client.delete_row(request=request) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.DeleteRowRequest, dict]]): + The request object. Request message for + TablesService.DeleteRow + name (:class:`str`): + Required. The name of the row to + delete. Format: + tables/{table}/rows/{row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = tables.DeleteRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_row, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def batch_delete_rows( + self, + request: Optional[Union[tables.BatchDeleteRowsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes multiple rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + async def sample_batch_delete_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceAsyncClient() + + # Initialize request argument(s) + request = tables_v1alpha1.BatchDeleteRowsRequest( + parent="parent_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + await client.batch_delete_rows(request=request) + + Args: + request (Optional[Union[google.area120.tables_v1alpha1.types.BatchDeleteRowsRequest, dict]]): + The request object. Request message for + TablesService.BatchDeleteRows + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = tables.BatchDeleteRowsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_delete_rows, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TablesServiceAsyncClient",) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py new file mode 100644 index 000000000000..e3b0776fd923 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py @@ -0,0 +1,1639 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
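The `__aenter__`/`__aexit__` pair above makes the async client usable as an async context manager, which guarantees the transport is closed on exit. A minimal sketch (table name hypothetical, credentials assumed):

    import asyncio

    from google.area120 import tables_v1alpha1


    async def main():
        async with tables_v1alpha1.TablesServiceAsyncClient() as client:
            table = await client.get_table(name="tables/my-table")
            print(table.name)
        # The transport is closed once the `async with` block exits.

    asyncio.run(main())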
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.area120.tables_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.area120.tables_v1alpha1.services.tables_service import pagers +from google.area120.tables_v1alpha1.types import tables + +from .transports.base import DEFAULT_CLIENT_INFO, TablesServiceTransport +from .transports.grpc import TablesServiceGrpcTransport +from .transports.grpc_asyncio import TablesServiceGrpcAsyncIOTransport +from .transports.rest import TablesServiceRestTransport + + +class TablesServiceClientMeta(type): + """Metaclass for the TablesService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TablesServiceTransport]] + _transport_registry["grpc"] = TablesServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TablesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TablesServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TablesServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TablesServiceClient(metaclass=TablesServiceClientMeta): + """The Tables Service provides an API for reading and updating tables. + It defines the following resource model: + + - The API has a collection of + [Table][google.area120.tables.v1alpha1.Table] resources, named + ``tables/*`` + + - Each Table has a collection of + [Row][google.area120.tables.v1alpha1.Row] resources, named + ``tables/*/rows/*`` + + - The API has a collection of + [Workspace][google.area120.tables.v1alpha1.Workspace] resources, + named ``workspaces/*``. 
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "area120tables.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TablesServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TablesServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> TablesServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TablesServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def row_path(
+        table: str,
+        row: str,
+    ) -> str:
+        """Returns a fully-qualified row string."""
+        return "tables/{table}/rows/{row}".format(
+            table=table,
+            row=row,
+        )
+
+    @staticmethod
+    def parse_row_path(path: str) -> Dict[str, str]:
+        """Parses a row path into its component segments."""
+        m = re.match(r"^tables/(?P<table>.+?)/rows/(?P<row>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def table_path(
+        table: str,
+    ) -> str:
+        """Returns a fully-qualified table string."""
+        return "tables/{table}".format(
+            table=table,
+        )
+
+    @staticmethod
+    def parse_table_path(path: str) -> Dict[str, str]:
+        """Parses a table path into its component segments."""
+        m = re.match(r"^tables/(?P<table>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def workspace_path(
+        workspace: str,
+    ) -> str:
+        """Returns a fully-qualified workspace string."""
+        return "workspaces/{workspace}".format(
+            workspace=workspace,
+        )
+
+    @staticmethod
+    def parse_workspace_path(path: str) -> Dict[str, str]:
+        """Parses a workspace path into its component segments."""
+        m = re.match(r"^workspaces/(?P<workspace>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TablesServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the tables service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TablesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
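Because `get_mtls_endpoint_and_cert_source` is a classmethod driven by the two environment variables above, its result can be inspected without constructing a client. A sketch under default settings:

    import os

    from google.area120 import tables_v1alpha1

    # With GOOGLE_API_USE_CLIENT_CERTIFICATE unset (treated as "false") and
    # GOOGLE_API_USE_MTLS_ENDPOINT unset (treated as "auto"), no client
    # certificate is selected and the regular endpoint is returned.
    os.environ.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None)
    os.environ.pop("GOOGLE_API_USE_MTLS_ENDPOINT", None)
    endpoint, cert_source = (
        tables_v1alpha1.TablesServiceClient.get_mtls_endpoint_and_cert_source()
    )
    assert endpoint == "area120tables.googleapis.com"
    assert cert_source is None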
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TablesServiceTransport): + # transport is a TablesServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_table( + self, + request: Optional[Union[tables.GetTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Table: + r"""Gets a table. Returns NOT_FOUND if the table does not exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
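Tying the constructor logic above back to the transport registry defined earlier: a string label selects the matching transport class, with "grpc" as the default (first) entry. A sketch requesting the REST transport (credentials assumed available):

    from google.area120 import tables_v1alpha1

    # "grpc", "grpc_asyncio", and "rest" are the registered labels.
    rest_client = tables_v1alpha1.TablesServiceClient(transport="rest")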
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_get_table(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = client.get_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.GetTableRequest, dict]): + The request object. Request message for + TablesService.GetTable. + name (str): + Required. The name of the table to + retrieve. Format: tables/{table} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Table: + A single table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a tables.GetTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.GetTableRequest): + request = tables.GetTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tables( + self, + request: Optional[Union[tables.ListTablesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTablesPager: + r"""Lists tables for the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_list_tables(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.ListTablesRequest( + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.ListTablesRequest, dict]): + The request object. Request message for + TablesService.ListTables. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.services.tables_service.pagers.ListTablesPager: + Response message for + TablesService.ListTables. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a tables.ListTablesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.ListTablesRequest): + request = tables.ListTablesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tables] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTablesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_workspace( + self, + request: Optional[Union[tables.GetWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Workspace: + r"""Gets a workspace. Returns NOT_FOUND if the workspace does not + exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_get_workspace(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.GetWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.GetWorkspaceRequest, dict]): + The request object. Request message for + TablesService.GetWorkspace. + name (str): + Required. 
The name of the workspace + to retrieve. Format: + workspaces/{workspace} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Workspace: + A single workspace. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a tables.GetWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.GetWorkspaceRequest): + request = tables.GetWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workspaces( + self, + request: Optional[Union[tables.ListWorkspacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkspacesPager: + r"""Lists workspaces for the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_list_workspaces(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.ListWorkspacesRequest( + ) + + # Make the request + page_result = client.list_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.ListWorkspacesRequest, dict]): + The request object. Request message for + TablesService.ListWorkspaces. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.area120.tables_v1alpha1.services.tables_service.pagers.ListWorkspacesPager: + Response message for + TablesService.ListWorkspaces. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a tables.ListWorkspacesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.ListWorkspacesRequest): + request = tables.ListWorkspacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workspaces] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkspacesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_row( + self, + request: Optional[Union[tables.GetRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Gets a row. Returns NOT_FOUND if the row does not exist in the + table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_get_row(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.GetRowRequest( + name="name_value", + ) + + # Make the request + response = client.get_row(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.GetRowRequest, dict]): + The request object. Request message for + TablesService.GetRow. + name (str): + Required. The name of the row to + retrieve. Format: + tables/{table}/rows/{row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Row: + A single row in a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a tables.GetRowRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.GetRowRequest): + request = tables.GetRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_row] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_rows( + self, + request: Optional[Union[tables.ListRowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRowsPager: + r"""Lists rows in a table. Returns NOT_FOUND if the table does not + exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_list_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.ListRowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_rows(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.ListRowsRequest, dict]): + The request object. Request message for + TablesService.ListRows. + parent (str): + Required. The parent table. + Format: tables/{table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.services.tables_service.pagers.ListRowsPager: + Response message for + TablesService.ListRows. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a tables.ListRowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, tables.ListRowsRequest): + request = tables.ListRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_rows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRowsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_row( + self, + request: Optional[Union[tables.CreateRowRequest, dict]] = None, + *, + parent: Optional[str] = None, + row: Optional[tables.Row] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Creates a row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_create_row(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.CreateRowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_row(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.CreateRowRequest, dict]): + The request object. Request message for + TablesService.CreateRow. + parent (str): + Required. The parent table where this + row will be created. Format: + tables/{table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + row (google.area120.tables_v1alpha1.types.Row): + Required. The row to create. + This corresponds to the ``row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Row: + A single row in a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, row]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a tables.CreateRowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.CreateRowRequest): + request = tables.CreateRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if row is not None: + request.row = row + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_row] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_create_rows( + self, + request: Optional[Union[tables.BatchCreateRowsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.BatchCreateRowsResponse: + r"""Creates multiple rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_batch_create_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + requests = tables_v1alpha1.CreateRowRequest() + requests.parent = "parent_value" + + request = tables_v1alpha1.BatchCreateRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.BatchCreateRowsRequest, dict]): + The request object. Request message for + TablesService.BatchCreateRows. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.BatchCreateRowsResponse: + Response message for + TablesService.BatchCreateRows. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a tables.BatchCreateRowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.BatchCreateRowsRequest): + request = tables.BatchCreateRowsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_rows] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_row( + self, + request: Optional[Union[tables.UpdateRowRequest, dict]] = None, + *, + row: Optional[tables.Row] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Updates a row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_update_row(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.UpdateRowRequest( + ) + + # Make the request + response = client.update_row(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.UpdateRowRequest, dict]): + The request object. Request message for + TablesService.UpdateRow. + row (google.area120.tables_v1alpha1.types.Row): + Required. The row to update. + This corresponds to the ``row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.Row: + A single row in a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([row, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a tables.UpdateRowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.UpdateRowRequest): + request = tables.UpdateRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if row is not None: + request.row = row + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_row] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("row.name", request.row.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_rows( + self, + request: Optional[Union[tables.BatchUpdateRowsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.BatchUpdateRowsResponse: + r"""Updates multiple rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_batch_update_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.BatchUpdateRowsRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_update_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.area120.tables_v1alpha1.types.BatchUpdateRowsRequest, dict]): + The request object. Request message for + TablesService.BatchUpdateRows. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.area120.tables_v1alpha1.types.BatchUpdateRowsResponse: + Response message for + TablesService.BatchUpdateRows. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a tables.BatchUpdateRowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.BatchUpdateRowsRequest): + request = tables.BatchUpdateRowsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_update_rows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_row( + self, + request: Optional[Union[tables.DeleteRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_delete_row(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.DeleteRowRequest( + name="name_value", + ) + + # Make the request + client.delete_row(request=request) + + Args: + request (Union[google.area120.tables_v1alpha1.types.DeleteRowRequest, dict]): + The request object. Request message for + TablesService.DeleteRow + name (str): + Required. The name of the row to + delete. Format: + tables/{table}/rows/{row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a tables.DeleteRowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.DeleteRowRequest): + request = tables.DeleteRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_row] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def batch_delete_rows( + self, + request: Optional[Union[tables.BatchDeleteRowsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes multiple rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.area120 import tables_v1alpha1 + + def sample_batch_delete_rows(): + # Create a client + client = tables_v1alpha1.TablesServiceClient() + + # Initialize request argument(s) + request = tables_v1alpha1.BatchDeleteRowsRequest( + parent="parent_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + client.batch_delete_rows(request=request) + + Args: + request (Union[google.area120.tables_v1alpha1.types.BatchDeleteRowsRequest, dict]): + The request object. Request message for + TablesService.BatchDeleteRows + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a tables.BatchDeleteRowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, tables.BatchDeleteRowsRequest): + request = tables.BatchDeleteRowsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_delete_rows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "TablesServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TablesServiceClient",) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/pagers.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/pagers.py new file mode 100644 index 000000000000..a7e1315397c7 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/pagers.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
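
# A minimal usage sketch (illustrative only, not part of the generated file
# above) of the endpoint resolution implemented by TablesServiceClient in
# tables_service/client.py. It assumes application default credentials are
# available; "your-table-id" is a placeholder value.
from google.api_core.client_options import ClientOptions
from google.area120 import tables_v1alpha1

# With GOOGLE_API_USE_MTLS_ENDPOINT unset (treated as "auto") and no client
# certificate configured, the client resolves to DEFAULT_ENDPOINT.
client = tables_v1alpha1.TablesServiceClient()

# An explicit api_endpoint takes precedence over both environment variables.
client = tables_v1alpha1.TablesServiceClient(
    client_options=ClientOptions(api_endpoint="area120tables.googleapis.com")
)

table = client.get_table(name="tables/your-table-id")
print(table.display_name)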
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.area120.tables_v1alpha1.types import tables + + +class ListTablesPager: + """A pager for iterating through ``list_tables`` requests. + + This class thinly wraps an initial + :class:`google.area120.tables_v1alpha1.types.ListTablesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tables`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTables`` requests and continue to iterate + through the ``tables`` field on the + corresponding responses. + + All the usual :class:`google.area120.tables_v1alpha1.types.ListTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., tables.ListTablesResponse], + request: tables.ListTablesRequest, + response: tables.ListTablesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.area120.tables_v1alpha1.types.ListTablesRequest): + The initial request object. + response (google.area120.tables_v1alpha1.types.ListTablesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = tables.ListTablesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[tables.ListTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tables.Table]: + for page in self.pages: + yield from page.tables + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTablesAsyncPager: + """A pager for iterating through ``list_tables`` requests. + + This class thinly wraps an initial + :class:`google.area120.tables_v1alpha1.types.ListTablesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tables`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTables`` requests and continue to iterate + through the ``tables`` field on the + corresponding responses. + + All the usual :class:`google.area120.tables_v1alpha1.types.ListTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[tables.ListTablesResponse]], + request: tables.ListTablesRequest, + response: tables.ListTablesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.area120.tables_v1alpha1.types.ListTablesRequest): + The initial request object. + response (google.area120.tables_v1alpha1.types.ListTablesResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = tables.ListTablesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[tables.ListTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[tables.Table]: + async def async_generator(): + async for page in self.pages: + for response in page.tables: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkspacesPager: + """A pager for iterating through ``list_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.area120.tables_v1alpha1.types.ListWorkspacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workspaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkspaces`` requests and continue to iterate + through the ``workspaces`` field on the + corresponding responses. + + All the usual :class:`google.area120.tables_v1alpha1.types.ListWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., tables.ListWorkspacesResponse], + request: tables.ListWorkspacesRequest, + response: tables.ListWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.area120.tables_v1alpha1.types.ListWorkspacesRequest): + The initial request object. + response (google.area120.tables_v1alpha1.types.ListWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = tables.ListWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[tables.ListWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tables.Workspace]: + for page in self.pages: + yield from page.workspaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkspacesAsyncPager: + """A pager for iterating through ``list_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.area120.tables_v1alpha1.types.ListWorkspacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workspaces`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkspaces`` requests and continue to iterate + through the ``workspaces`` field on the + corresponding responses. + + All the usual :class:`google.area120.tables_v1alpha1.types.ListWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[tables.ListWorkspacesResponse]], + request: tables.ListWorkspacesRequest, + response: tables.ListWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.area120.tables_v1alpha1.types.ListWorkspacesRequest): + The initial request object. + response (google.area120.tables_v1alpha1.types.ListWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = tables.ListWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[tables.ListWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[tables.Workspace]: + async def async_generator(): + async for page in self.pages: + for response in page.workspaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRowsPager: + """A pager for iterating through ``list_rows`` requests. + + This class thinly wraps an initial + :class:`google.area120.tables_v1alpha1.types.ListRowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``rows`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRows`` requests and continue to iterate + through the ``rows`` field on the + corresponding responses. + + All the usual :class:`google.area120.tables_v1alpha1.types.ListRowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., tables.ListRowsResponse], + request: tables.ListRowsRequest, + response: tables.ListRowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.area120.tables_v1alpha1.types.ListRowsRequest): + The initial request object. + response (google.area120.tables_v1alpha1.types.ListRowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = tables.ListRowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[tables.ListRowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tables.Row]: + for page in self.pages: + yield from page.rows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRowsAsyncPager: + """A pager for iterating through ``list_rows`` requests. + + This class thinly wraps an initial + :class:`google.area120.tables_v1alpha1.types.ListRowsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``rows`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRows`` requests and continue to iterate + through the ``rows`` field on the + corresponding responses. + + All the usual :class:`google.area120.tables_v1alpha1.types.ListRowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[tables.ListRowsResponse]], + request: tables.ListRowsRequest, + response: tables.ListRowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.area120.tables_v1alpha1.types.ListRowsRequest): + The initial request object. + response (google.area120.tables_v1alpha1.types.ListRowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = tables.ListRowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[tables.ListRowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[tables.Row]: + async def async_generator(): + async for page in self.pages: + for response in page.rows: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/__init__.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/__init__.py new file mode 100644 index 000000000000..2dd46fb49416 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TablesServiceTransport +from .grpc import TablesServiceGrpcTransport +from .grpc_asyncio import TablesServiceGrpcAsyncIOTransport +from .rest import TablesServiceRestInterceptor, TablesServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TablesServiceTransport]] +_transport_registry["grpc"] = TablesServiceGrpcTransport +_transport_registry["grpc_asyncio"] = TablesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TablesServiceRestTransport + +__all__ = ( + "TablesServiceTransport", + "TablesServiceGrpcTransport", + "TablesServiceGrpcAsyncIOTransport", + "TablesServiceRestTransport", + "TablesServiceRestInterceptor", +) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/base.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/base.py new file mode 100644 index 000000000000..3c2480769cad --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/base.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
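
# A short sketch (illustrative only, not part of the generated file above) of
# how the transport registry in transports/__init__.py is consumed: the
# registered names are the valid values for the client's ``transport``
# argument, and get_transport_class() performs the same lookup explicitly.
from google.area120 import tables_v1alpha1

# gRPC is the default; "rest" selects the HTTP/JSON transport instead.
rest_client = tables_v1alpha1.TablesServiceClient(transport="rest")

transport_cls = tables_v1alpha1.TablesServiceClient.get_transport_class("rest")
assert transport_cls.__name__ == "TablesServiceRestTransport"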
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+from google.area120.tables_v1alpha1 import gapic_version as package_version
+from google.area120.tables_v1alpha1.types import tables
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+class TablesServiceTransport(abc.ABC):
+    """Abstract transport class for TablesService."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/drive",
+        "https://www.googleapis.com/auth/drive.file",
+        "https://www.googleapis.com/auth/drive.readonly",
+        "https://www.googleapis.com/auth/spreadsheets",
+        "https://www.googleapis.com/auth/spreadsheets.readonly",
+        "https://www.googleapis.com/auth/tables",
+    )
+
+    DEFAULT_HOST: str = "area120tables.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if a credentials file was passed by the user.
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_table: gapic_v1.method.wrap_method( + self.get_table, + default_timeout=60.0, + client_info=client_info, + ), + self.list_tables: gapic_v1.method.wrap_method( + self.list_tables, + default_timeout=60.0, + client_info=client_info, + ), + self.get_workspace: gapic_v1.method.wrap_method( + self.get_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.list_workspaces: gapic_v1.method.wrap_method( + self.list_workspaces, + default_timeout=60.0, + client_info=client_info, + ), + self.get_row: gapic_v1.method.wrap_method( + self.get_row, + default_timeout=60.0, + client_info=client_info, + ), + self.list_rows: gapic_v1.method.wrap_method( + self.list_rows, + default_timeout=60.0, + client_info=client_info, + ), + self.create_row: gapic_v1.method.wrap_method( + self.create_row, + default_timeout=60.0, + client_info=client_info, + ), + self.batch_create_rows: gapic_v1.method.wrap_method( + self.batch_create_rows, + default_timeout=60.0, + client_info=client_info, + ), + self.update_row: gapic_v1.method.wrap_method( + self.update_row, + default_timeout=60.0, + client_info=client_info, + ), + self.batch_update_rows: gapic_v1.method.wrap_method( + self.batch_update_rows, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_row: gapic_v1.method.wrap_method( + self.delete_row, + default_timeout=60.0, + client_info=client_info, + ), + self.batch_delete_rows: gapic_v1.method.wrap_method( + self.batch_delete_rows, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
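To make that warning concrete, a short lifecycle sketch (illustrative only; it uses the concrete gRPC transport defined later in this diff and assumes default application credentials):

.. code-block:: python

    from google.area120.tables_v1alpha1.services.tables_service.transports import (
        TablesServiceGrpcTransport,
    )

    transport = TablesServiceGrpcTransport()  # credentials resolved via ADC
    try:
        ...  # issue RPCs through the transport's stub properties
    finally:
        # Safe only because nothing else shares this transport.
        transport.close()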
+ """ + raise NotImplementedError() + + @property + def get_table( + self, + ) -> Callable[ + [tables.GetTableRequest], Union[tables.Table, Awaitable[tables.Table]] + ]: + raise NotImplementedError() + + @property + def list_tables( + self, + ) -> Callable[ + [tables.ListTablesRequest], + Union[tables.ListTablesResponse, Awaitable[tables.ListTablesResponse]], + ]: + raise NotImplementedError() + + @property + def get_workspace( + self, + ) -> Callable[ + [tables.GetWorkspaceRequest], + Union[tables.Workspace, Awaitable[tables.Workspace]], + ]: + raise NotImplementedError() + + @property + def list_workspaces( + self, + ) -> Callable[ + [tables.ListWorkspacesRequest], + Union[tables.ListWorkspacesResponse, Awaitable[tables.ListWorkspacesResponse]], + ]: + raise NotImplementedError() + + @property + def get_row( + self, + ) -> Callable[[tables.GetRowRequest], Union[tables.Row, Awaitable[tables.Row]]]: + raise NotImplementedError() + + @property + def list_rows( + self, + ) -> Callable[ + [tables.ListRowsRequest], + Union[tables.ListRowsResponse, Awaitable[tables.ListRowsResponse]], + ]: + raise NotImplementedError() + + @property + def create_row( + self, + ) -> Callable[[tables.CreateRowRequest], Union[tables.Row, Awaitable[tables.Row]]]: + raise NotImplementedError() + + @property + def batch_create_rows( + self, + ) -> Callable[ + [tables.BatchCreateRowsRequest], + Union[ + tables.BatchCreateRowsResponse, Awaitable[tables.BatchCreateRowsResponse] + ], + ]: + raise NotImplementedError() + + @property + def update_row( + self, + ) -> Callable[[tables.UpdateRowRequest], Union[tables.Row, Awaitable[tables.Row]]]: + raise NotImplementedError() + + @property + def batch_update_rows( + self, + ) -> Callable[ + [tables.BatchUpdateRowsRequest], + Union[ + tables.BatchUpdateRowsResponse, Awaitable[tables.BatchUpdateRowsResponse] + ], + ]: + raise NotImplementedError() + + @property + def delete_row( + self, + ) -> Callable[ + [tables.DeleteRowRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def batch_delete_rows( + self, + ) -> Callable[ + [tables.BatchDeleteRowsRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TablesServiceTransport",) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/grpc.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/grpc.py new file mode 100644 index 000000000000..b9cfa88b1688 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/grpc.py @@ -0,0 +1,554 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.area120.tables_v1alpha1.types import tables + +from .base import DEFAULT_CLIENT_INFO, TablesServiceTransport + + +class TablesServiceGrpcTransport(TablesServiceTransport): + """gRPC backend transport for TablesService. + + The Tables Service provides an API for reading and updating tables. + It defines the following resource model: + + - The API has a collection of + [Table][google.area120.tables.v1alpha1.Table] resources, named + ``tables/*`` + + - Each Table has a collection of + [Row][google.area120.tables.v1alpha1.Row] resources, named + ``tables/*/rows/*`` + + - The API has a collection of + [Workspace][google.area120.tables.v1alpha1.Workspace] resources, + named ``workspaces/*``. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "area120tables.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "area120tables.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_table(self) -> Callable[[tables.GetTableRequest], tables.Table]: + r"""Return a callable for the get table method over gRPC. + + Gets a table. Returns NOT_FOUND if the table does not exist. + + Returns: + Callable[[~.GetTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_table" not in self._stubs: + self._stubs["get_table"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/GetTable", + request_serializer=tables.GetTableRequest.serialize, + response_deserializer=tables.Table.deserialize, + ) + return self._stubs["get_table"] + + @property + def list_tables( + self, + ) -> Callable[[tables.ListTablesRequest], tables.ListTablesResponse]: + r"""Return a callable for the list tables method over gRPC. + + Lists tables for the user. + + Returns: + Callable[[~.ListTablesRequest], + ~.ListTablesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_tables" not in self._stubs: + self._stubs["list_tables"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/ListTables", + request_serializer=tables.ListTablesRequest.serialize, + response_deserializer=tables.ListTablesResponse.deserialize, + ) + return self._stubs["list_tables"] + + @property + def get_workspace(self) -> Callable[[tables.GetWorkspaceRequest], tables.Workspace]: + r"""Return a callable for the get workspace method over gRPC. + + Gets a workspace. Returns NOT_FOUND if the workspace does not + exist. + + Returns: + Callable[[~.GetWorkspaceRequest], + ~.Workspace]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workspace" not in self._stubs: + self._stubs["get_workspace"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/GetWorkspace", + request_serializer=tables.GetWorkspaceRequest.serialize, + response_deserializer=tables.Workspace.deserialize, + ) + return self._stubs["get_workspace"] + + @property + def list_workspaces( + self, + ) -> Callable[[tables.ListWorkspacesRequest], tables.ListWorkspacesResponse]: + r"""Return a callable for the list workspaces method over gRPC. + + Lists workspaces for the user. + + Returns: + Callable[[~.ListWorkspacesRequest], + ~.ListWorkspacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workspaces" not in self._stubs: + self._stubs["list_workspaces"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/ListWorkspaces", + request_serializer=tables.ListWorkspacesRequest.serialize, + response_deserializer=tables.ListWorkspacesResponse.deserialize, + ) + return self._stubs["list_workspaces"] + + @property + def get_row(self) -> Callable[[tables.GetRowRequest], tables.Row]: + r"""Return a callable for the get row method over gRPC. + + Gets a row. Returns NOT_FOUND if the row does not exist in the + table. + + Returns: + Callable[[~.GetRowRequest], + ~.Row]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_row" not in self._stubs: + self._stubs["get_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/GetRow", + request_serializer=tables.GetRowRequest.serialize, + response_deserializer=tables.Row.deserialize, + ) + return self._stubs["get_row"] + + @property + def list_rows(self) -> Callable[[tables.ListRowsRequest], tables.ListRowsResponse]: + r"""Return a callable for the list rows method over gRPC. + + Lists rows in a table. Returns NOT_FOUND if the table does not + exist. + + Returns: + Callable[[~.ListRowsRequest], + ~.ListRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_rows" not in self._stubs: + self._stubs["list_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/ListRows", + request_serializer=tables.ListRowsRequest.serialize, + response_deserializer=tables.ListRowsResponse.deserialize, + ) + return self._stubs["list_rows"] + + @property + def create_row(self) -> Callable[[tables.CreateRowRequest], tables.Row]: + r"""Return a callable for the create row method over gRPC. + + Creates a row. + + Returns: + Callable[[~.CreateRowRequest], + ~.Row]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_row" not in self._stubs: + self._stubs["create_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/CreateRow", + request_serializer=tables.CreateRowRequest.serialize, + response_deserializer=tables.Row.deserialize, + ) + return self._stubs["create_row"] + + @property + def batch_create_rows( + self, + ) -> Callable[[tables.BatchCreateRowsRequest], tables.BatchCreateRowsResponse]: + r"""Return a callable for the batch create rows method over gRPC. + + Creates multiple rows. + + Returns: + Callable[[~.BatchCreateRowsRequest], + ~.BatchCreateRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_rows" not in self._stubs: + self._stubs["batch_create_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/BatchCreateRows", + request_serializer=tables.BatchCreateRowsRequest.serialize, + response_deserializer=tables.BatchCreateRowsResponse.deserialize, + ) + return self._stubs["batch_create_rows"] + + @property + def update_row(self) -> Callable[[tables.UpdateRowRequest], tables.Row]: + r"""Return a callable for the update row method over gRPC. + + Updates a row. + + Returns: + Callable[[~.UpdateRowRequest], + ~.Row]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_row" not in self._stubs: + self._stubs["update_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/UpdateRow", + request_serializer=tables.UpdateRowRequest.serialize, + response_deserializer=tables.Row.deserialize, + ) + return self._stubs["update_row"] + + @property + def batch_update_rows( + self, + ) -> Callable[[tables.BatchUpdateRowsRequest], tables.BatchUpdateRowsResponse]: + r"""Return a callable for the batch update rows method over gRPC. + + Updates multiple rows. + + Returns: + Callable[[~.BatchUpdateRowsRequest], + ~.BatchUpdateRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_update_rows" not in self._stubs: + self._stubs["batch_update_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/BatchUpdateRows", + request_serializer=tables.BatchUpdateRowsRequest.serialize, + response_deserializer=tables.BatchUpdateRowsResponse.deserialize, + ) + return self._stubs["batch_update_rows"] + + @property + def delete_row(self) -> Callable[[tables.DeleteRowRequest], empty_pb2.Empty]: + r"""Return a callable for the delete row method over gRPC. + + Deletes a row. + + Returns: + Callable[[~.DeleteRowRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_row" not in self._stubs: + self._stubs["delete_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/DeleteRow", + request_serializer=tables.DeleteRowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_row"] + + @property + def batch_delete_rows( + self, + ) -> Callable[[tables.BatchDeleteRowsRequest], empty_pb2.Empty]: + r"""Return a callable for the batch delete rows method over gRPC. + + Deletes multiple rows. + + Returns: + Callable[[~.BatchDeleteRowsRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_delete_rows" not in self._stubs: + self._stubs["batch_delete_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/BatchDeleteRows", + request_serializer=tables.BatchDeleteRowsRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_rows"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TablesServiceGrpcTransport",) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/grpc_asyncio.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b6a337aa7194 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/grpc_asyncio.py @@ -0,0 +1,565 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.area120.tables_v1alpha1.types import tables + +from .base import DEFAULT_CLIENT_INFO, TablesServiceTransport +from .grpc import TablesServiceGrpcTransport + + +class TablesServiceGrpcAsyncIOTransport(TablesServiceTransport): + """gRPC AsyncIO backend transport for TablesService. + + The Tables Service provides an API for reading and updating tables. + It defines the following resource model: + + - The API has a collection of + [Table][google.area120.tables.v1alpha1.Table] resources, named + ``tables/*`` + + - Each Table has a collection of + [Row][google.area120.tables.v1alpha1.Row] resources, named + ``tables/*/rows/*`` + + - The API has a collection of + [Workspace][google.area120.tables.v1alpha1.Workspace] resources, + named ``workspaces/*``. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "area120tables.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
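A short usage sketch for this classmethod (assumes default application credentials; the channel is created and consumed inside a running event loop):

.. code-block:: python

    import asyncio

    from google.area120.tables_v1alpha1.services.tables_service.transports import (
        TablesServiceGrpcAsyncIOTransport,
    )

    async def main():
        channel = TablesServiceGrpcAsyncIOTransport.create_channel()
        transport = TablesServiceGrpcAsyncIOTransport(channel=channel)
        ...  # use the transport
        await transport.grpc_channel.close()

    asyncio.run(main())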
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "area120tables.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_table(self) -> Callable[[tables.GetTableRequest], Awaitable[tables.Table]]: + r"""Return a callable for the get table method over gRPC. + + Gets a table. Returns NOT_FOUND if the table does not exist. + + Returns: + Callable[[~.GetTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_table" not in self._stubs: + self._stubs["get_table"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/GetTable", + request_serializer=tables.GetTableRequest.serialize, + response_deserializer=tables.Table.deserialize, + ) + return self._stubs["get_table"] + + @property + def list_tables( + self, + ) -> Callable[[tables.ListTablesRequest], Awaitable[tables.ListTablesResponse]]: + r"""Return a callable for the list tables method over gRPC. + + Lists tables for the user. 
+ + Returns: + Callable[[~.ListTablesRequest], + Awaitable[~.ListTablesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tables" not in self._stubs: + self._stubs["list_tables"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/ListTables", + request_serializer=tables.ListTablesRequest.serialize, + response_deserializer=tables.ListTablesResponse.deserialize, + ) + return self._stubs["list_tables"] + + @property + def get_workspace( + self, + ) -> Callable[[tables.GetWorkspaceRequest], Awaitable[tables.Workspace]]: + r"""Return a callable for the get workspace method over gRPC. + + Gets a workspace. Returns NOT_FOUND if the workspace does not + exist. + + Returns: + Callable[[~.GetWorkspaceRequest], + Awaitable[~.Workspace]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workspace" not in self._stubs: + self._stubs["get_workspace"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/GetWorkspace", + request_serializer=tables.GetWorkspaceRequest.serialize, + response_deserializer=tables.Workspace.deserialize, + ) + return self._stubs["get_workspace"] + + @property + def list_workspaces( + self, + ) -> Callable[ + [tables.ListWorkspacesRequest], Awaitable[tables.ListWorkspacesResponse] + ]: + r"""Return a callable for the list workspaces method over gRPC. + + Lists workspaces for the user. + + Returns: + Callable[[~.ListWorkspacesRequest], + Awaitable[~.ListWorkspacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workspaces" not in self._stubs: + self._stubs["list_workspaces"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/ListWorkspaces", + request_serializer=tables.ListWorkspacesRequest.serialize, + response_deserializer=tables.ListWorkspacesResponse.deserialize, + ) + return self._stubs["list_workspaces"] + + @property + def get_row(self) -> Callable[[tables.GetRowRequest], Awaitable[tables.Row]]: + r"""Return a callable for the get row method over gRPC. + + Gets a row. Returns NOT_FOUND if the row does not exist in the + table. + + Returns: + Callable[[~.GetRowRequest], + Awaitable[~.Row]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_row" not in self._stubs: + self._stubs["get_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/GetRow", + request_serializer=tables.GetRowRequest.serialize, + response_deserializer=tables.Row.deserialize, + ) + return self._stubs["get_row"] + + @property + def list_rows( + self, + ) -> Callable[[tables.ListRowsRequest], Awaitable[tables.ListRowsResponse]]: + r"""Return a callable for the list rows method over gRPC. + + Lists rows in a table. Returns NOT_FOUND if the table does not + exist. + + Returns: + Callable[[~.ListRowsRequest], + Awaitable[~.ListRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_rows" not in self._stubs: + self._stubs["list_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/ListRows", + request_serializer=tables.ListRowsRequest.serialize, + response_deserializer=tables.ListRowsResponse.deserialize, + ) + return self._stubs["list_rows"] + + @property + def create_row(self) -> Callable[[tables.CreateRowRequest], Awaitable[tables.Row]]: + r"""Return a callable for the create row method over gRPC. + + Creates a row. + + Returns: + Callable[[~.CreateRowRequest], + Awaitable[~.Row]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_row" not in self._stubs: + self._stubs["create_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/CreateRow", + request_serializer=tables.CreateRowRequest.serialize, + response_deserializer=tables.Row.deserialize, + ) + return self._stubs["create_row"] + + @property + def batch_create_rows( + self, + ) -> Callable[ + [tables.BatchCreateRowsRequest], Awaitable[tables.BatchCreateRowsResponse] + ]: + r"""Return a callable for the batch create rows method over gRPC. + + Creates multiple rows. + + Returns: + Callable[[~.BatchCreateRowsRequest], + Awaitable[~.BatchCreateRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_rows" not in self._stubs: + self._stubs["batch_create_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/BatchCreateRows", + request_serializer=tables.BatchCreateRowsRequest.serialize, + response_deserializer=tables.BatchCreateRowsResponse.deserialize, + ) + return self._stubs["batch_create_rows"] + + @property + def update_row(self) -> Callable[[tables.UpdateRowRequest], Awaitable[tables.Row]]: + r"""Return a callable for the update row method over gRPC. + + Updates a row. + + Returns: + Callable[[~.UpdateRowRequest], + Awaitable[~.Row]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_row" not in self._stubs: + self._stubs["update_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/UpdateRow", + request_serializer=tables.UpdateRowRequest.serialize, + response_deserializer=tables.Row.deserialize, + ) + return self._stubs["update_row"] + + @property + def batch_update_rows( + self, + ) -> Callable[ + [tables.BatchUpdateRowsRequest], Awaitable[tables.BatchUpdateRowsResponse] + ]: + r"""Return a callable for the batch update rows method over gRPC. + + Updates multiple rows. + + Returns: + Callable[[~.BatchUpdateRowsRequest], + Awaitable[~.BatchUpdateRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_update_rows" not in self._stubs: + self._stubs["batch_update_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/BatchUpdateRows", + request_serializer=tables.BatchUpdateRowsRequest.serialize, + response_deserializer=tables.BatchUpdateRowsResponse.deserialize, + ) + return self._stubs["batch_update_rows"] + + @property + def delete_row( + self, + ) -> Callable[[tables.DeleteRowRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete row method over gRPC. + + Deletes a row. + + Returns: + Callable[[~.DeleteRowRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_row" not in self._stubs: + self._stubs["delete_row"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/DeleteRow", + request_serializer=tables.DeleteRowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_row"] + + @property + def batch_delete_rows( + self, + ) -> Callable[[tables.BatchDeleteRowsRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the batch delete rows method over gRPC. + + Deletes multiple rows. + + Returns: + Callable[[~.BatchDeleteRowsRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_delete_rows" not in self._stubs: + self._stubs["batch_delete_rows"] = self.grpc_channel.unary_unary( + "/google.area120.tables.v1alpha1.TablesService/BatchDeleteRows", + request_serializer=tables.BatchDeleteRowsRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_rows"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TablesServiceGrpcAsyncIOTransport",) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/rest.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/rest.py new file mode 100644 index 000000000000..9d7e26965d31 --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/rest.py @@ -0,0 +1,1647 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.area120.tables_v1alpha1.types import tables + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TablesServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TablesServiceRestInterceptor: + """Interceptor for TablesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TablesServiceRestTransport. + + .. 
code-block:: python + class MyCustomTablesServiceInterceptor(TablesServiceRestInterceptor): + def pre_batch_create_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_delete_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_batch_update_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_update_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_row(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_row(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workspace(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workspace(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tables(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tables(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workspaces(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workspaces(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_row(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TablesServiceRestTransport(interceptor=MyCustomTablesServiceInterceptor()) + client = TablesServiceClient(transport=transport) + + + """ + + def pre_batch_create_rows( + self, + request: tables.BatchCreateRowsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[tables.BatchCreateRowsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_create_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_batch_create_rows( + self, response: tables.BatchCreateRowsResponse + ) -> tables.BatchCreateRowsResponse: + """Post-rpc interceptor for batch_create_rows + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. 
+ """ + return response + + def pre_batch_delete_rows( + self, + request: tables.BatchDeleteRowsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[tables.BatchDeleteRowsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_delete_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def pre_batch_update_rows( + self, + request: tables.BatchUpdateRowsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[tables.BatchUpdateRowsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_update_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_batch_update_rows( + self, response: tables.BatchUpdateRowsResponse + ) -> tables.BatchUpdateRowsResponse: + """Post-rpc interceptor for batch_update_rows + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. + """ + return response + + def pre_create_row( + self, request: tables.CreateRowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.CreateRowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_create_row(self, response: tables.Row) -> tables.Row: + """Post-rpc interceptor for create_row + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. + """ + return response + + def pre_delete_row( + self, request: tables.DeleteRowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.DeleteRowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def pre_get_row( + self, request: tables.GetRowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.GetRowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_get_row(self, response: tables.Row) -> tables.Row: + """Post-rpc interceptor for get_row + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. + """ + return response + + def pre_get_table( + self, request: tables.GetTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.GetTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_get_table(self, response: tables.Table) -> tables.Table: + """Post-rpc interceptor for get_table + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_workspace( + self, request: tables.GetWorkspaceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.GetWorkspaceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workspace + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_get_workspace(self, response: tables.Workspace) -> tables.Workspace: + """Post-rpc interceptor for get_workspace + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. + """ + return response + + def pre_list_rows( + self, request: tables.ListRowsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.ListRowsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_list_rows( + self, response: tables.ListRowsResponse + ) -> tables.ListRowsResponse: + """Post-rpc interceptor for list_rows + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. + """ + return response + + def pre_list_tables( + self, request: tables.ListTablesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.ListTablesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tables + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_list_tables( + self, response: tables.ListTablesResponse + ) -> tables.ListTablesResponse: + """Post-rpc interceptor for list_tables + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. + """ + return response + + def pre_list_workspaces( + self, request: tables.ListWorkspacesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.ListWorkspacesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workspaces + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_list_workspaces( + self, response: tables.ListWorkspacesResponse + ) -> tables.ListWorkspacesResponse: + """Post-rpc interceptor for list_workspaces + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. + """ + return response + + def pre_update_row( + self, request: tables.UpdateRowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[tables.UpdateRowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the TablesService server. + """ + return request, metadata + + def post_update_row(self, response: tables.Row) -> tables.Row: + """Post-rpc interceptor for update_row + + Override in a subclass to manipulate the response + after it is returned by the TablesService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class TablesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TablesServiceRestInterceptor + + +class TablesServiceRestTransport(TablesServiceTransport): + """REST backend transport for TablesService. + + The Tables Service provides an API for reading and updating tables. + It defines the following resource model: + + - The API has a collection of + [Table][google.area120.tables.v1alpha1.Table] resources, named + ``tables/*`` + + - Each Table has a collection of + [Row][google.area120.tables.v1alpha1.Row] resources, named + ``tables/*/rows/*`` + + - The API has a collection of + [Workspace][google.area120.tables.v1alpha1.Workspace] resources, + named ``workspaces/*``. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "area120tables.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TablesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or TablesServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _BatchCreateRows(TablesServiceRestStub):
+ def __hash__(self):
+ return hash("BatchCreateRows")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: tables.BatchCreateRowsRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> tables.BatchCreateRowsResponse:
+ r"""Call the batch create rows method over HTTP.
+
+ Args:
+ request (~.tables.BatchCreateRowsRequest):
+ The request object. Request message for
+ TablesService.BatchCreateRows.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.tables.BatchCreateRowsResponse:
+ Response message for
+ TablesService.BatchCreateRows.
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1alpha1/{parent=tables/*}/rows:batchCreate",
+ "body": "*",
+ },
+ ]
+ request, metadata = self._interceptor.pre_batch_create_rows(
+ request, metadata
+ )
+ pb_request = tables.BatchCreateRowsRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
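+ # from_http_response maps the HTTP status to the matching subclass, e.g.
+ # 403 -> PermissionDenied, 404 -> NotFound, 500 -> InternalServerError.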
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.BatchCreateRowsResponse() + pb_resp = tables.BatchCreateRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_rows(resp) + return resp + + class _BatchDeleteRows(TablesServiceRestStub): + def __hash__(self): + return hash("BatchDeleteRows") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.BatchDeleteRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the batch delete rows method over HTTP. + + Args: + request (~.tables.BatchDeleteRowsRequest): + The request object. Request message for + TablesService.BatchDeleteRows + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=tables/*}/rows:batchDelete", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_delete_rows( + request, metadata + ) + pb_request = tables.BatchDeleteRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _BatchUpdateRows(TablesServiceRestStub): + def __hash__(self): + return hash("BatchUpdateRows") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.BatchUpdateRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.BatchUpdateRowsResponse: + r"""Call the batch update rows method over HTTP. + + Args: + request (~.tables.BatchUpdateRowsRequest): + The request object. Request message for + TablesService.BatchUpdateRows. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.BatchUpdateRowsResponse: + Response message for + TablesService.BatchUpdateRows. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=tables/*}/rows:batchUpdate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_update_rows( + request, metadata + ) + pb_request = tables.BatchUpdateRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.BatchUpdateRowsResponse() + pb_resp = tables.BatchUpdateRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_update_rows(resp) + return resp + + class _CreateRow(TablesServiceRestStub): + def __hash__(self): + return hash("CreateRow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.CreateRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Call the create row method over HTTP. + + Args: + request (~.tables.CreateRowRequest): + The request object. Request message for + TablesService.CreateRow. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.Row: + A single row in a table. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=tables/*}/rows", + "body": "row", + }, + ] + request, metadata = self._interceptor.pre_create_row(request, metadata) + pb_request = tables.CreateRowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.Row() + pb_resp = tables.Row.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_row(resp) + return resp + + class _DeleteRow(TablesServiceRestStub): + def __hash__(self): + return hash("DeleteRow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.DeleteRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete row method over HTTP. + + Args: + request (~.tables.DeleteRowRequest): + The request object. Request message for + TablesService.DeleteRow + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=tables/*/rows/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_row(request, metadata) + pb_request = tables.DeleteRowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetRow(TablesServiceRestStub): + def __hash__(self): + return hash("GetRow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.GetRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Call the get row method over HTTP. + + Args: + request (~.tables.GetRowRequest): + The request object. Request message for + TablesService.GetRow. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.Row: + A single row in a table. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=tables/*/rows/*}", + }, + ] + request, metadata = self._interceptor.pre_get_row(request, metadata) + pb_request = tables.GetRowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.Row() + pb_resp = tables.Row.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_row(resp) + return resp + + class _GetTable(TablesServiceRestStub): + def __hash__(self): + return hash("GetTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.GetTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Table: + r"""Call the get table method over HTTP. + + Args: + request (~.tables.GetTableRequest): + The request object. Request message for + TablesService.GetTable. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.Table: + A single table. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=tables/*}", + }, + ] + request, metadata = self._interceptor.pre_get_table(request, metadata) + pb_request = tables.GetTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.Table() + pb_resp = tables.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_table(resp) + return resp + + class _GetWorkspace(TablesServiceRestStub): + def __hash__(self): + return hash("GetWorkspace") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.GetWorkspaceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Workspace: + r"""Call the get workspace method over HTTP. + + Args: + request (~.tables.GetWorkspaceRequest): + The request object. Request message for + TablesService.GetWorkspace. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.Workspace: + A single workspace. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=workspaces/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workspace(request, metadata) + pb_request = tables.GetWorkspaceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.Workspace() + pb_resp = tables.Workspace.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workspace(resp) + return resp + + class _ListRows(TablesServiceRestStub): + def __hash__(self): + return hash("ListRows") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.ListRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.ListRowsResponse: + r"""Call the list rows method over HTTP. + + Args: + request (~.tables.ListRowsRequest): + The request object. Request message for + TablesService.ListRows. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.ListRowsResponse: + Response message for + TablesService.ListRows. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=tables/*}/rows", + }, + ] + request, metadata = self._interceptor.pre_list_rows(request, metadata) + pb_request = tables.ListRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.ListRowsResponse() + pb_resp = tables.ListRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_rows(resp) + return resp + + class _ListTables(TablesServiceRestStub): + def __hash__(self): + return hash("ListTables") + + def __call__( + self, + request: tables.ListTablesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.ListTablesResponse: + r"""Call the list tables method over HTTP. + + Args: + request (~.tables.ListTablesRequest): + The request object. Request message for + TablesService.ListTables. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.ListTablesResponse: + Response message for + TablesService.ListTables. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/tables", + }, + ] + request, metadata = self._interceptor.pre_list_tables(request, metadata) + pb_request = tables.ListTablesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
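+ # subclass.
+ # ListTables is a collection-level GET (/v1alpha1/tables) with no path
+ # parameters; paging is driven by the page_size/page_token query params.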
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.ListTablesResponse() + pb_resp = tables.ListTablesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tables(resp) + return resp + + class _ListWorkspaces(TablesServiceRestStub): + def __hash__(self): + return hash("ListWorkspaces") + + def __call__( + self, + request: tables.ListWorkspacesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.ListWorkspacesResponse: + r"""Call the list workspaces method over HTTP. + + Args: + request (~.tables.ListWorkspacesRequest): + The request object. Request message for + TablesService.ListWorkspaces. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.ListWorkspacesResponse: + Response message for + TablesService.ListWorkspaces. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/workspaces", + }, + ] + request, metadata = self._interceptor.pre_list_workspaces(request, metadata) + pb_request = tables.ListWorkspacesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.ListWorkspacesResponse() + pb_resp = tables.ListWorkspacesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workspaces(resp) + return resp + + class _UpdateRow(TablesServiceRestStub): + def __hash__(self): + return hash("UpdateRow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: tables.UpdateRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tables.Row: + r"""Call the update row method over HTTP. + + Args: + request (~.tables.UpdateRowRequest): + The request object. Request message for + TablesService.UpdateRow. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tables.Row: + A single row in a table. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{row.name=tables/*/rows/*}", + "body": "row", + }, + ] + request, metadata = self._interceptor.pre_update_row(request, metadata) + pb_request = tables.UpdateRowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tables.Row() + pb_resp = tables.Row.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_row(resp) + return resp + + @property + def batch_create_rows( + self, + ) -> Callable[[tables.BatchCreateRowsRequest], tables.BatchCreateRowsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_delete_rows( + self, + ) -> Callable[[tables.BatchDeleteRowsRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchDeleteRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_update_rows( + self, + ) -> Callable[[tables.BatchUpdateRowsRequest], tables.BatchUpdateRowsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchUpdateRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_row(self) -> Callable[[tables.CreateRowRequest], tables.Row]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_row(self) -> Callable[[tables.DeleteRowRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_row(self) -> Callable[[tables.GetRowRequest], tables.Row]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_table(self) -> Callable[[tables.GetTableRequest], tables.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workspace(self) -> Callable[[tables.GetWorkspaceRequest], tables.Workspace]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkspace(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_rows(self) -> Callable[[tables.ListRowsRequest], tables.ListRowsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tables( + self, + ) -> Callable[[tables.ListTablesRequest], tables.ListTablesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTables(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workspaces( + self, + ) -> Callable[[tables.ListWorkspacesRequest], tables.ListWorkspacesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkspaces(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_row(self) -> Callable[[tables.UpdateRowRequest], tables.Row]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TablesServiceRestTransport",) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/types/__init__.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/types/__init__.py new file mode 100644 index 000000000000..b5b4ad45b73b --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/types/__init__.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .tables import ( + BatchCreateRowsRequest, + BatchCreateRowsResponse, + BatchDeleteRowsRequest, + BatchUpdateRowsRequest, + BatchUpdateRowsResponse, + ColumnDescription, + CreateRowRequest, + DeleteRowRequest, + GetRowRequest, + GetTableRequest, + GetWorkspaceRequest, + LabeledItem, + ListRowsRequest, + ListRowsResponse, + ListTablesRequest, + ListTablesResponse, + ListWorkspacesRequest, + ListWorkspacesResponse, + LookupDetails, + RelationshipDetails, + Row, + Table, + UpdateRowRequest, + View, + Workspace, +) + +__all__ = ( + "BatchCreateRowsRequest", + "BatchCreateRowsResponse", + "BatchDeleteRowsRequest", + "BatchUpdateRowsRequest", + "BatchUpdateRowsResponse", + "ColumnDescription", + "CreateRowRequest", + "DeleteRowRequest", + "GetRowRequest", + "GetTableRequest", + "GetWorkspaceRequest", + "LabeledItem", + "ListRowsRequest", + "ListRowsResponse", + "ListTablesRequest", + "ListTablesResponse", + "ListWorkspacesRequest", + "ListWorkspacesResponse", + "LookupDetails", + "RelationshipDetails", + "Row", + "Table", + "UpdateRowRequest", + "Workspace", + "View", +) diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/types/tables.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/types/tables.py new file mode 100644 index 000000000000..3f9eed9a806a --- /dev/null +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/types/tables.py @@ -0,0 +1,701 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.area120.tables.v1alpha1", + manifest={ + "View", + "GetTableRequest", + "ListTablesRequest", + "ListTablesResponse", + "GetWorkspaceRequest", + "ListWorkspacesRequest", + "ListWorkspacesResponse", + "GetRowRequest", + "ListRowsRequest", + "ListRowsResponse", + "CreateRowRequest", + "BatchCreateRowsRequest", + "BatchCreateRowsResponse", + "UpdateRowRequest", + "BatchUpdateRowsRequest", + "BatchUpdateRowsResponse", + "DeleteRowRequest", + "BatchDeleteRowsRequest", + "Table", + "ColumnDescription", + "LabeledItem", + "RelationshipDetails", + "LookupDetails", + "Row", + "Workspace", + }, +) + + +class View(proto.Enum): + r"""Column identifier used for the values in the row. + + Values: + VIEW_UNSPECIFIED (0): + Defaults to user entered text. + COLUMN_ID_VIEW (1): + Uses internally generated column id to + identify values. + """ + VIEW_UNSPECIFIED = 0 + COLUMN_ID_VIEW = 1 + + +class GetTableRequest(proto.Message): + r"""Request message for TablesService.GetTable. + + Attributes: + name (str): + Required. The name of the table to retrieve. 
+ Format: tables/{table} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTablesRequest(proto.Message): + r"""Request message for TablesService.ListTables. + + Attributes: + page_size (int): + The maximum number of tables to return. The + service may return fewer than this value. + + If unspecified, at most 20 tables are returned. + The maximum value is 100; values above 100 are + coerced to 100. + page_token (str): + A page token, received from a previous ``ListTables`` call. + Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListTables`` must match the call that provided the page + token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTablesResponse(proto.Message): + r"""Response message for TablesService.ListTables. + + Attributes: + tables (MutableSequence[google.area120.tables_v1alpha1.types.Table]): + The list of tables. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is empty, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + tables: MutableSequence["Table"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Table", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetWorkspaceRequest(proto.Message): + r"""Request message for TablesService.GetWorkspace. + + Attributes: + name (str): + Required. The name of the workspace to + retrieve. Format: workspaces/{workspace} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListWorkspacesRequest(proto.Message): + r"""Request message for TablesService.ListWorkspaces. + + Attributes: + page_size (int): + The maximum number of workspaces to return. + The service may return fewer than this value. + If unspecified, at most 10 workspaces are + returned. The maximum value is 25; values above + 25 are coerced to 25. + page_token (str): + A page token, received from a previous ``ListWorkspaces`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListWorkspaces`` must match the call that provided the + page token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListWorkspacesResponse(proto.Message): + r"""Response message for TablesService.ListWorkspaces. + + Attributes: + workspaces (MutableSequence[google.area120.tables_v1alpha1.types.Workspace]): + The list of workspaces. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is empty, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + workspaces: MutableSequence["Workspace"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Workspace", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetRowRequest(proto.Message): + r"""Request message for TablesService.GetRow. + + Attributes: + name (str): + Required. The name of the row to retrieve. + Format: tables/{table}/rows/{row} + view (google.area120.tables_v1alpha1.types.View): + Optional. Column key to use for values in the + row. Defaults to user entered name. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "View" = proto.Field( + proto.ENUM, + number=2, + enum="View", + ) + + +class ListRowsRequest(proto.Message): + r"""Request message for TablesService.ListRows. + + Attributes: + parent (str): + Required. The parent table. + Format: tables/{table} + page_size (int): + The maximum number of rows to return. The + service may return fewer than this value. + + If unspecified, at most 50 rows are returned. + The maximum value is 1,000; values above 1,000 + are coerced to 1,000. + page_token (str): + A page token, received from a previous ``ListRows`` call. + Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListRows`` must match the call that provided the page + token. + view (google.area120.tables_v1alpha1.types.View): + Optional. Column key to use for values in the + row. Defaults to user entered name. + filter (str): + Optional. Raw text query to search for in + rows of the table. Special characters must be + escaped. Logical operators and field specific + filtering not supported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + view: "View" = proto.Field( + proto.ENUM, + number=4, + enum="View", + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListRowsResponse(proto.Message): + r"""Response message for TablesService.ListRows. + + Attributes: + rows (MutableSequence[google.area120.tables_v1alpha1.types.Row]): + The rows from the specified table. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is empty, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + rows: MutableSequence["Row"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Row", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateRowRequest(proto.Message): + r"""Request message for TablesService.CreateRow. + + Attributes: + parent (str): + Required. The parent table where this row + will be created. Format: tables/{table} + row (google.area120.tables_v1alpha1.types.Row): + Required. The row to create. + view (google.area120.tables_v1alpha1.types.View): + Optional. Column key to use for values in the + row. Defaults to user entered name. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + row: "Row" = proto.Field( + proto.MESSAGE, + number=2, + message="Row", + ) + view: "View" = proto.Field( + proto.ENUM, + number=3, + enum="View", + ) + + +class BatchCreateRowsRequest(proto.Message): + r"""Request message for TablesService.BatchCreateRows. + + Attributes: + parent (str): + Required. The parent table where the rows + will be created. Format: tables/{table} + requests (MutableSequence[google.area120.tables_v1alpha1.types.CreateRowRequest]): + Required. The request message specifying the + rows to create. + A maximum of 500 rows can be created in a single + batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateRowRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateRowRequest", + ) + + +class BatchCreateRowsResponse(proto.Message): + r"""Response message for TablesService.BatchCreateRows. 
+ + Attributes: + rows (MutableSequence[google.area120.tables_v1alpha1.types.Row]): + The created rows. + """ + + rows: MutableSequence["Row"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Row", + ) + + +class UpdateRowRequest(proto.Message): + r"""Request message for TablesService.UpdateRow. + + Attributes: + row (google.area120.tables_v1alpha1.types.Row): + Required. The row to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + view (google.area120.tables_v1alpha1.types.View): + Optional. Column key to use for values in the + row. Defaults to user entered name. + """ + + row: "Row" = proto.Field( + proto.MESSAGE, + number=1, + message="Row", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + view: "View" = proto.Field( + proto.ENUM, + number=3, + enum="View", + ) + + +class BatchUpdateRowsRequest(proto.Message): + r"""Request message for TablesService.BatchUpdateRows. + + Attributes: + parent (str): + Required. The parent table shared by all rows + being updated. Format: tables/{table} + requests (MutableSequence[google.area120.tables_v1alpha1.types.UpdateRowRequest]): + Required. The request messages specifying the + rows to update. + A maximum of 500 rows can be modified in a + single batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["UpdateRowRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="UpdateRowRequest", + ) + + +class BatchUpdateRowsResponse(proto.Message): + r"""Response message for TablesService.BatchUpdateRows. + + Attributes: + rows (MutableSequence[google.area120.tables_v1alpha1.types.Row]): + The updated rows. + """ + + rows: MutableSequence["Row"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Row", + ) + + +class DeleteRowRequest(proto.Message): + r"""Request message for TablesService.DeleteRow + + Attributes: + name (str): + Required. The name of the row to delete. + Format: tables/{table}/rows/{row} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BatchDeleteRowsRequest(proto.Message): + r"""Request message for TablesService.BatchDeleteRows + + Attributes: + parent (str): + Required. The parent table shared by all rows + being deleted. Format: tables/{table} + names (MutableSequence[str]): + Required. The names of the rows to delete. + All rows must belong to the parent table or else + the entire batch will fail. A maximum of 500 + rows can be deleted in a batch. + Format: tables/{table}/rows/{row} + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class Table(proto.Message): + r"""A single table. + + Attributes: + name (str): + The resource name of the table. Table names have the form + ``tables/{table}``. + display_name (str): + The human readable title of the table. + columns (MutableSequence[google.area120.tables_v1alpha1.types.ColumnDescription]): + List of columns in this table. + Order of columns matches the display order. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + columns: MutableSequence["ColumnDescription"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="ColumnDescription", + ) + + +class ColumnDescription(proto.Message): + r"""Details on a column in the table. 
+ + Attributes: + name (str): + column name + data_type (str): + Data type of the column Supported types are auto_id, + boolean, boolean_list, creator, create_timestamp, date, + dropdown, location, integer, integer_list, number, + number_list, person, person_list, tags, check_list, text, + text_list, update_timestamp, updater, relationship, + file_attachment_list. These types directly map to the column + types supported on Tables website. + id (str): + Internal id for a column. + labels (MutableSequence[google.area120.tables_v1alpha1.types.LabeledItem]): + Optional. Range of labeled values for the + column. Some columns like tags and drop-downs + limit the values to a set of possible values. We + return the range of values in such cases to help + clients implement better user data validation. + relationship_details (google.area120.tables_v1alpha1.types.RelationshipDetails): + Optional. Additional details about a relationship column. + Specified when data_type is relationship. + lookup_details (google.area120.tables_v1alpha1.types.LookupDetails): + Optional. Indicates that this is a lookup + column whose value is derived from the + relationship column specified in the details. + Lookup columns can not be updated directly. To + change the value you must update the associated + relationship column. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_type: str = proto.Field( + proto.STRING, + number=2, + ) + id: str = proto.Field( + proto.STRING, + number=3, + ) + labels: MutableSequence["LabeledItem"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabeledItem", + ) + relationship_details: "RelationshipDetails" = proto.Field( + proto.MESSAGE, + number=5, + message="RelationshipDetails", + ) + lookup_details: "LookupDetails" = proto.Field( + proto.MESSAGE, + number=6, + message="LookupDetails", + ) + + +class LabeledItem(proto.Message): + r"""A single item in a labeled column. + + Attributes: + name (str): + Display string as entered by user. + id (str): + Internal id associated with the item. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RelationshipDetails(proto.Message): + r"""Details about a relationship column. + + Attributes: + linked_table (str): + The name of the table this relationship is + linked to. + """ + + linked_table: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupDetails(proto.Message): + r"""Details about a lookup column whose value comes from the + associated relationship. + + Attributes: + relationship_column (str): + The name of the relationship column + associated with the lookup. + relationship_column_id (str): + The id of the relationship column. + """ + + relationship_column: str = proto.Field( + proto.STRING, + number=1, + ) + relationship_column_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Row(proto.Message): + r"""A single row in a table. + + Attributes: + name (str): + The resource name of the row. Row names have the form + ``tables/{table}/rows/{row}``. The name is ignored when + creating a row. + values (MutableMapping[str, google.protobuf.struct_pb2.Value]): + The values of the row. This is a map of + column key to value. Key is user entered + name(default) or the internal column id based on + the view in the request. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + values: MutableMapping[str, struct_pb2.Value] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + + +class Workspace(proto.Message): + r"""A single workspace. + + Attributes: + name (str): + The resource name of the workspace. Workspace names have the + form ``workspaces/{workspace}``. + display_name (str): + The human readable title of the workspace. + tables (MutableSequence[google.area120.tables_v1alpha1.types.Table]): + The list of tables in the workspace. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + tables: MutableSequence["Table"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Table", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-area120-tables/mypy.ini b/packages/google-area120-tables/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-area120-tables/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-area120-tables/noxfile.py b/packages/google-area120-tables/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-area120-tables/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
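+
+    (Illustrative usage note: run the unit sessions first, e.g.
+    ``nox -s unit-3.9 cover``, so there is coverage data to aggregate.)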
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-area120-tables/renovate.json b/packages/google-area120-tables/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-area120-tables/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-area120-tables/scripts/decrypt-secrets.sh b/packages/google-area120-tables/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..21f6d2a26d90
--- /dev/null
+++ b/packages/google-area120-tables/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources.
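+# ("${VAR:-default}" below is standard Bash parameter expansion: it yields
+# $VAR when the variable is set and non-empty, and "default" otherwise.)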
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-area120-tables/scripts/fixup_tables_v1alpha1_keywords.py b/packages/google-area120-tables/scripts/fixup_tables_v1alpha1_keywords.py new file mode 100644 index 000000000000..2a83d5813046 --- /dev/null +++ b/packages/google-area120-tables/scripts/fixup_tables_v1alpha1_keywords.py @@ -0,0 +1,187 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tablesCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_rows': ('parent', 'requests', ), + 'batch_delete_rows': ('parent', 'names', ), + 'batch_update_rows': ('parent', 'requests', ), + 'create_row': ('parent', 'row', 'view', ), + 'delete_row': ('name', ), + 'get_row': ('name', 'view', ), + 'get_table': ('name', ), + 'get_workspace': ('name', ), + 'list_rows': ('parent', 'page_size', 'page_token', 'view', 'filter', ), + 'list_tables': ('page_size', 'page_token', ), + 'list_workspaces': ('page_size', 'page_token', ), + 'update_row': ('row', 'update_mask', 'view', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tablesCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tables client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
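+
+    Illustrative example of the rewrite (hypothetical values; parameter
+    names are taken from METHOD_TO_PARAMS above):
+
+        before: client.get_row("tables/t/rows/r", view)
+        after:  client.get_row(request={'name': "tables/t/rows/r", 'view': view})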
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-area120-tables/scripts/readme-gen/readme_gen.py b/packages/google-area120-tables/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..91b59676bfc7 --- /dev/null +++ b/packages/google-area120-tables/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/packages/google-area120-tables/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-area120-tables/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-area120-tables/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. 
+ +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-area120-tables/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-area120-tables/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-area120-tables/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-area120-tables/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-area120-tables/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-area120-tables/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. 
Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-area120-tables/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-area120-tables/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-area120-tables/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+    .. code-block:: bash
+
+        $ virtualenv env
+        $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+    .. code-block:: bash
+
+        $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/packages/google-area120-tables/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-area120-tables/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000000..5ea33d18c00c
--- /dev/null
+++ b/packages/google-area120-tables/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for
+cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+      brew install portaudio
+
+  **Note**: if you encounter an error when running `pip install` that indicates
+  it can't find `portaudio.h`, try running `pip install` with the following
+  flags::
+
+      pip install --global-option='build_ext' \
+          --global-option='-I/usr/local/include' \
+          --global-option='-L/usr/local/lib' \
+          pyaudio
+
+* For Debian / Ubuntu Linux::
+
+      apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+  installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+    https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/packages/google-area120-tables/setup.cfg b/packages/google-area120-tables/setup.cfg
new file mode 100644
index 000000000000..c3a2b39f6528
--- /dev/null
+++ b/packages/google-area120-tables/setup.cfg
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-area120-tables/setup.py b/packages/google-area120-tables/setup.py new file mode 100644 index 000000000000..b71a903c1b59 --- /dev/null +++ b/packages/google-area120-tables/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-area120-tables" + + +description = "Google Area120 Tables API client library" + +version = {} +with open(os.path.join(package_root, "google/area120/tables/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.area120"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + 
zip_safe=False, +) diff --git a/packages/google-area120-tables/testing/.gitignore b/packages/google-area120-tables/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-area120-tables/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-area120-tables/testing/constraints-3.10.txt b/packages/google-area120-tables/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-area120-tables/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-area120-tables/testing/constraints-3.11.txt b/packages/google-area120-tables/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-area120-tables/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-area120-tables/testing/constraints-3.12.txt b/packages/google-area120-tables/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-area120-tables/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-area120-tables/testing/constraints-3.7.txt b/packages/google-area120-tables/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-area120-tables/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-area120-tables/testing/constraints-3.8.txt b/packages/google-area120-tables/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-area120-tables/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-area120-tables/testing/constraints-3.9.txt b/packages/google-area120-tables/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-area120-tables/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
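+# (These entries are deliberately unpinned so tests run against the newest
+# compatible releases; constraints-3.7.txt pins the documented lower bounds.)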
+google-api-core +proto-plus +protobuf diff --git a/packages/google-area120-tables/tests/__init__.py b/packages/google-area120-tables/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-area120-tables/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-area120-tables/tests/unit/__init__.py b/packages/google-area120-tables/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-area120-tables/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-area120-tables/tests/unit/gapic/__init__.py b/packages/google-area120-tables/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-area120-tables/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/__init__.py b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py new file mode 100644 index 000000000000..ba6cf4acf16f --- /dev/null +++ b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py @@ -0,0 +1,7311 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.area120.tables_v1alpha1.services.tables_service import ( + TablesServiceAsyncClient, + TablesServiceClient, + pagers, + transports, +) +from google.area120.tables_v1alpha1.types import tables + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
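+# For example, if DEFAULT_ENDPOINT were "localhost:7469", tests would use
+# "foo.googleapis.com" instead, so that the derived mTLS endpoint
+# ("foo.mtls.googleapis.com") is distinguishable from the regular endpoint.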
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TablesServiceClient._get_default_mtls_endpoint(None) is None + assert ( + TablesServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + TablesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + TablesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TablesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TablesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TablesServiceClient, "grpc"), + (TablesServiceAsyncClient, "grpc_asyncio"), + (TablesServiceClient, "rest"), + ], +) +def test_tables_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "area120tables.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://area120tables.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TablesServiceGrpcTransport, "grpc"), + (transports.TablesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TablesServiceRestTransport, "rest"), + ], +) +def test_tables_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TablesServiceClient, "grpc"), + (TablesServiceAsyncClient, "grpc_asyncio"), + (TablesServiceClient, "rest"), + ], +) +def test_tables_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + 
assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "area120tables.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://area120tables.googleapis.com" + ) + + +def test_tables_service_client_get_transport_class(): + transport = TablesServiceClient.get_transport_class() + available_transports = [ + transports.TablesServiceGrpcTransport, + transports.TablesServiceRestTransport, + ] + assert transport in available_transports + + transport = TablesServiceClient.get_transport_class("grpc") + assert transport == transports.TablesServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TablesServiceClient, transports.TablesServiceGrpcTransport, "grpc"), + ( + TablesServiceAsyncClient, + transports.TablesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TablesServiceClient, transports.TablesServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + TablesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TablesServiceClient), +) +@mock.patch.object( + TablesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TablesServiceAsyncClient), +) +def test_tables_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TablesServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TablesServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
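+    # (With "always", the mTLS endpoint must be used even though no client
+    # certificate source is configured, as asserted below.)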
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (TablesServiceClient, transports.TablesServiceGrpcTransport, "grpc", "true"), + ( + TablesServiceAsyncClient, + transports.TablesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (TablesServiceClient, transports.TablesServiceGrpcTransport, "grpc", "false"), + ( + TablesServiceAsyncClient, + transports.TablesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (TablesServiceClient, transports.TablesServiceRestTransport, "rest", "true"), + (TablesServiceClient, transports.TablesServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + TablesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TablesServiceClient), +) +@mock.patch.object( + TablesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TablesServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_tables_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
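+    # (With no certificate available from either source, the client should
+    # fall back to the regular endpoint with mTLS disabled.)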
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [TablesServiceClient, TablesServiceAsyncClient] +) +@mock.patch.object( + TablesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TablesServiceClient), +) +@mock.patch.object( + TablesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TablesServiceAsyncClient), +) +def test_tables_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TablesServiceClient, transports.TablesServiceGrpcTransport, "grpc"), + ( + TablesServiceAsyncClient, + transports.TablesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TablesServiceClient, transports.TablesServiceRestTransport, "rest"), + ], +) +def test_tables_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TablesServiceClient, + transports.TablesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TablesServiceAsyncClient, + transports.TablesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (TablesServiceClient, transports.TablesServiceRestTransport, "rest", None), + ], +) +def test_tables_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_tables_service_client_client_options_from_dict(): + with mock.patch( + "google.area120.tables_v1alpha1.services.tables_service.transports.TablesServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TablesServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TablesServiceClient, + transports.TablesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TablesServiceAsyncClient, + transports.TablesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_tables_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
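+    # Both google.auth entry points are patched below, so no real key material is read;
+    # the assertion checks that the file-based credentials (not ADC) reach create_channel.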
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "area120tables.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.readonly", + "https://www.googleapis.com/auth/spreadsheets", + "https://www.googleapis.com/auth/spreadsheets.readonly", + "https://www.googleapis.com/auth/tables", + ), + scopes=None, + default_host="area120tables.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.GetTableRequest, + dict, + ], +) +def test_get_table(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Table( + name="name_value", + display_name="display_name_value", + ) + response = client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Table) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + client.get_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetTableRequest() + + +@pytest.mark.asyncio +async def test_get_table_async( + transport: str = "grpc_asyncio", request_type=tables.GetTableRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. 
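+        # FakeUnaryUnaryCall is awaitable and resolves to the wrapped message, standing
+        # in for a real async gRPC call.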
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.Table( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Table) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_table_async_from_dict(): + await test_get_table_async(request_type=dict) + + +def test_get_table_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.GetTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value = tables.Table() + client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_table_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.GetTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Table()) + await client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_table_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_table_flattened_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
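+    # The ValueError is raised client-side, before any RPC is attempted.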
+    with pytest.raises(ValueError):
+        client.get_table(
+            tables.GetTableRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_table_flattened_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Table())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_table(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_table_flattened_error_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_table(
+            tables.GetTableRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        tables.ListTablesRequest,
+        dict,
+    ],
+)
+def test_list_tables(request_type, transport: str = "grpc"):
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tables), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tables.ListTablesResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_tables(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == tables.ListTablesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListTablesPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_tables_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tables), "__call__") as call:
+        client.list_tables()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == tables.ListTablesRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_tables_async(
+    transport: str = "grpc_asyncio", request_type=tables.ListTablesRequest
+):
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
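+    # request_type is parametrized as both the proto class and dict, so coercion
+    # from a plain mapping is exercised as well.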
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.ListTablesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.ListTablesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTablesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tables_async_from_dict(): + await test_list_tables_async(request_type=dict) + + +def test_list_tables_pager(transport_name: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + tables.Table(), + ], + next_page_token="abc", + ), + tables.ListTablesResponse( + tables=[], + next_page_token="def", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + ], + next_page_token="ghi", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_tables(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tables.Table) for i in results) + + +def test_list_tables_pages(transport_name: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + tables.Table(), + ], + next_page_token="abc", + ), + tables.ListTablesResponse( + tables=[], + next_page_token="def", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + ], + next_page_token="ghi", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tables(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tables_async_pager(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tables), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
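+        # side_effect yields one page per call; the trailing RuntimeError would surface
+        # if the pager ever requested a page beyond the final empty-token response.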
+ call.side_effect = ( + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + tables.Table(), + ], + next_page_token="abc", + ), + tables.ListTablesResponse( + tables=[], + next_page_token="def", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + ], + next_page_token="ghi", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tables( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tables.Table) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tables_async_pages(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tables), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + tables.Table(), + ], + next_page_token="abc", + ), + tables.ListTablesResponse( + tables=[], + next_page_token="def", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + ], + next_page_token="ghi", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_tables(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + tables.GetWorkspaceRequest, + dict, + ], +) +def test_get_workspace(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workspace), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Workspace( + name="name_value", + display_name="display_name_value", + ) + response = client.get_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Workspace) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_workspace), "__call__") as call: + client.get_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_get_workspace_async( + transport: str = "grpc_asyncio", request_type=tables.GetWorkspaceRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workspace), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.Workspace( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Workspace) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_workspace_async_from_dict(): + await test_get_workspace_async(request_type=dict) + + +def test_get_workspace_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.GetWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workspace), "__call__") as call: + call.return_value = tables.Workspace() + client.get_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workspace_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.GetWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workspace), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Workspace()) + await client.get_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
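+    # x-goog-request-params is the routing header the GAPIC layer derives from fields
+    # that appear in the request URI.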
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_workspace_flattened():
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workspace), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tables.Workspace()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_workspace(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_workspace_flattened_error():
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_workspace(
+            tables.GetWorkspaceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_workspace_flattened_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workspace), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Workspace())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_workspace(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_workspace_flattened_error_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_workspace(
+            tables.GetWorkspaceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        tables.ListWorkspacesRequest,
+        dict,
+    ],
+)
+def test_list_workspaces(request_type, transport: str = "grpc"):
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_workspaces), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tables.ListWorkspacesResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_workspaces(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.ListWorkspacesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkspacesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_workspaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workspaces), "__call__") as call: + client.list_workspaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.ListWorkspacesRequest() + + +@pytest.mark.asyncio +async def test_list_workspaces_async( + transport: str = "grpc_asyncio", request_type=tables.ListWorkspacesRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workspaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.ListWorkspacesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.ListWorkspacesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkspacesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_workspaces_async_from_dict(): + await test_list_workspaces_async(request_type=dict) + + +def test_list_workspaces_pager(transport_name: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workspaces), "__call__") as call: + # Set the response to a series of pages. 
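+        # Pages of 3, 0, 1, and 2 items: the pager should flatten them into six workspaces.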
+ call.side_effect = ( + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + tables.Workspace(), + ], + next_page_token="abc", + ), + tables.ListWorkspacesResponse( + workspaces=[], + next_page_token="def", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + ], + next_page_token="ghi", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_workspaces(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tables.Workspace) for i in results) + + +def test_list_workspaces_pages(transport_name: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workspaces), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + tables.Workspace(), + ], + next_page_token="abc", + ), + tables.ListWorkspacesResponse( + workspaces=[], + next_page_token="def", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + ], + next_page_token="ghi", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + ], + ), + RuntimeError, + ) + pages = list(client.list_workspaces(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_workspaces_async_pager(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workspaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + tables.Workspace(), + ], + next_page_token="abc", + ), + tables.ListWorkspacesResponse( + workspaces=[], + next_page_token="def", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + ], + next_page_token="ghi", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_workspaces( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tables.Workspace) for i in responses) + + +@pytest.mark.asyncio +async def test_list_workspaces_async_pages(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workspaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + tables.Workspace(), + ], + next_page_token="abc", + ), + tables.ListWorkspacesResponse( + workspaces=[], + next_page_token="def", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + ], + next_page_token="ghi", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_workspaces(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + tables.GetRowRequest, + dict, + ], +) +def test_get_row(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Row( + name="name_value", + ) + response = client.get_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetRowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +def test_get_row_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_row), "__call__") as call: + client.get_row() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetRowRequest() + + +@pytest.mark.asyncio +async def test_get_row_async( + transport: str = "grpc_asyncio", request_type=tables.GetRowRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.Row( + name="name_value", + ) + ) + response = await client.get_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.GetRowRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_row_async_from_dict(): + await test_get_row_async(request_type=dict) + + +def test_get_row_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.GetRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_row), "__call__") as call: + call.return_value = tables.Row() + client.get_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_row_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.GetRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_row), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Row()) + await client.get_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_row_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Row() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_row( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_row_flattened_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_row( + tables.GetRowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_row_flattened_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_row), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Row())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_row(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_row_flattened_error_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_row(
+            tables.GetRowRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        tables.ListRowsRequest,
+        dict,
+    ],
+)
+def test_list_rows(request_type, transport: str = "grpc"):
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_rows), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tables.ListRowsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_rows(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == tables.ListRowsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListRowsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_rows_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_rows), "__call__") as call:
+        client.list_rows()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == tables.ListRowsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_rows_async(
+    transport: str = "grpc_asyncio", request_type=tables.ListRowsRequest
+):
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_rows), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            tables.ListRowsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.list_rows(request)
+
+        # Establish that the underlying gRPC stub method was called.
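+        # proto-plus messages compare by field value, which is why the default request
+        # built above equals a fresh ListRowsRequest().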
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.ListRowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRowsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_rows_async_from_dict(): + await test_list_rows_async(request_type=dict) + + +def test_list_rows_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.ListRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_rows), "__call__") as call: + call.return_value = tables.ListRowsResponse() + client.list_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_rows_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.ListRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_rows), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.ListRowsResponse() + ) + await client.list_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_rows_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_rows), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.ListRowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_rows( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_rows_flattened_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_rows(
+            tables.ListRowsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_rows_flattened_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_rows), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            tables.ListRowsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_rows(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_rows_flattened_error_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_rows(
+            tables.ListRowsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_rows_pager(transport_name: str = "grpc"):
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_rows), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            tables.ListRowsResponse(
+                rows=[
+                    tables.Row(),
+                    tables.Row(),
+                    tables.Row(),
+                ],
+                next_page_token="abc",
+            ),
+            tables.ListRowsResponse(
+                rows=[],
+                next_page_token="def",
+            ),
+            tables.ListRowsResponse(
+                rows=[
+                    tables.Row(),
+                ],
+                next_page_token="ghi",
+            ),
+            tables.ListRowsResponse(
+                rows=[
+                    tables.Row(),
+                    tables.Row(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_rows(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, tables.Row) for i in results)
+
+
+def test_list_rows_pages(transport_name: str = "grpc"):
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_rows), "__call__") as call:
+        # Set the response to a series of pages.
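+        # The final page carries an empty next_page_token, which is what terminates
+        # the .pages iteration below.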
+ call.side_effect = ( + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + tables.Row(), + ], + next_page_token="abc", + ), + tables.ListRowsResponse( + rows=[], + next_page_token="def", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + ], + next_page_token="ghi", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + ], + ), + RuntimeError, + ) + pages = list(client.list_rows(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_rows_async_pager(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rows), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + tables.Row(), + ], + next_page_token="abc", + ), + tables.ListRowsResponse( + rows=[], + next_page_token="def", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + ], + next_page_token="ghi", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_rows( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tables.Row) for i in responses) + + +@pytest.mark.asyncio +async def test_list_rows_async_pages(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rows), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + tables.Row(), + ], + next_page_token="abc", + ), + tables.ListRowsResponse( + rows=[], + next_page_token="def", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + ], + next_page_token="ghi", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_rows(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + tables.CreateRowRequest, + dict, + ], +) +def test_create_row(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Row( + name="name_value", + ) + response = client.create_row(request) + + # Establish that the underlying gRPC stub method was called. 
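+        # The sync tests pin the call count to exactly one; the async variants only
+        # assert that a call was recorded.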
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.CreateRowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +def test_create_row_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_row), "__call__") as call: + client.create_row() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.CreateRowRequest() + + +@pytest.mark.asyncio +async def test_create_row_async( + transport: str = "grpc_asyncio", request_type=tables.CreateRowRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.Row( + name="name_value", + ) + ) + response = await client.create_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.CreateRowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_create_row_async_from_dict(): + await test_create_row_async(request_type=dict) + + +def test_create_row_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.CreateRowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_row), "__call__") as call: + call.return_value = tables.Row() + client.create_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_row_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.CreateRowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.create_row), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Row())
+        await client.create_row(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_row_flattened():
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_row), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = tables.Row()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_row(
+            parent="parent_value",
+            row=tables.Row(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].row
+        mock_val = tables.Row(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_row_flattened_error():
+    client = TablesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_row(
+            tables.CreateRowRequest(),
+            parent="parent_value",
+            row=tables.Row(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_row_flattened_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_row), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Row())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_row(
+            parent="parent_value",
+            row=tables.Row(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].row
+        mock_val = tables.Row(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_row_flattened_error_async():
+    client = TablesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_row( + tables.CreateRowRequest(), + parent="parent_value", + row=tables.Row(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.BatchCreateRowsRequest, + dict, + ], +) +def test_batch_create_rows(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tables.BatchCreateRowsResponse() + response = client.batch_create_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchCreateRowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.BatchCreateRowsResponse) + + +def test_batch_create_rows_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_rows), "__call__" + ) as call: + client.batch_create_rows() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchCreateRowsRequest() + + +@pytest.mark.asyncio +async def test_batch_create_rows_async( + transport: str = "grpc_asyncio", request_type=tables.BatchCreateRowsRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.BatchCreateRowsResponse() + ) + response = await client.batch_create_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchCreateRowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.BatchCreateRowsResponse) + + +@pytest.mark.asyncio +async def test_batch_create_rows_async_from_dict(): + await test_batch_create_rows_async(request_type=dict) + + +def test_batch_create_rows_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.BatchCreateRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_rows), "__call__" + ) as call: + call.return_value = tables.BatchCreateRowsResponse() + client.batch_create_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_rows_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.BatchCreateRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.BatchCreateRowsResponse() + ) + await client.batch_create_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + tables.UpdateRowRequest, + dict, + ], +) +def test_update_row(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Row( + name="name_value", + ) + response = client.update_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.UpdateRowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +def test_update_row_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
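+    # Calling the method with no arguments should synthesize a default
+    # (empty) UpdateRowRequest rather than raising.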
+ with mock.patch.object(type(client.transport.update_row), "__call__") as call: + client.update_row() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.UpdateRowRequest() + + +@pytest.mark.asyncio +async def test_update_row_async( + transport: str = "grpc_asyncio", request_type=tables.UpdateRowRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.Row( + name="name_value", + ) + ) + response = await client.update_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.UpdateRowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_update_row_async_from_dict(): + await test_update_row_async(request_type=dict) + + +def test_update_row_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.UpdateRowRequest() + + request.row.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_row), "__call__") as call: + call.return_value = tables.Row() + client.update_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "row.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_row_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.UpdateRowRequest() + + request.row.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_row), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Row()) + await client.update_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "row.name=name_value", + ) in kw["metadata"] + + +def test_update_row_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Row() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_row( + row=tables.Row(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].row + mock_val = tables.Row(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_row_flattened_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_row( + tables.UpdateRowRequest(), + row=tables.Row(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_row_flattened_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tables.Row() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tables.Row()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_row( + row=tables.Row(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].row + mock_val = tables.Row(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_row_flattened_error_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_row( + tables.UpdateRowRequest(), + row=tables.Row(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.BatchUpdateRowsRequest, + dict, + ], +) +def test_batch_update_rows(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = tables.BatchUpdateRowsResponse() + response = client.batch_update_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchUpdateRowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.BatchUpdateRowsResponse) + + +def test_batch_update_rows_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_rows), "__call__" + ) as call: + client.batch_update_rows() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchUpdateRowsRequest() + + +@pytest.mark.asyncio +async def test_batch_update_rows_async( + transport: str = "grpc_asyncio", request_type=tables.BatchUpdateRowsRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.BatchUpdateRowsResponse() + ) + response = await client.batch_update_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchUpdateRowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.BatchUpdateRowsResponse) + + +@pytest.mark.asyncio +async def test_batch_update_rows_async_from_dict(): + await test_batch_update_rows_async(request_type=dict) + + +def test_batch_update_rows_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.BatchUpdateRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_rows), "__call__" + ) as call: + call.return_value = tables.BatchUpdateRowsResponse() + client.batch_update_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_update_rows_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
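+    # The routing header is expected to arrive as request metadata of the
+    # form ("x-goog-request-params", "parent=<value>"), as asserted below.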
+ request = tables.BatchUpdateRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tables.BatchUpdateRowsResponse() + ) + await client.batch_update_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + tables.DeleteRowRequest, + dict, + ], +) +def test_delete_row(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.DeleteRowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_row_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_row), "__call__") as call: + client.delete_row() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.DeleteRowRequest() + + +@pytest.mark.asyncio +async def test_delete_row_async( + transport: str = "grpc_asyncio", request_type=tables.DeleteRowRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.DeleteRowRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_row_async_from_dict(): + await test_delete_row_async(request_type=dict) + + +def test_delete_row_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.DeleteRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_row), "__call__") as call: + call.return_value = None + client.delete_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_row_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.DeleteRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_row), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_row_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_row( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_row_flattened_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_row( + tables.DeleteRowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_row_flattened_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_row), "__call__") as call: + # Designate an appropriate return value for the call. 
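+        # (The second assignment below supersedes the first; only the
+        # FakeUnaryUnaryCall wrapper is actually awaited by the async client.)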
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_row( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_row_flattened_error_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_row( + tables.DeleteRowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.BatchDeleteRowsRequest, + dict, + ], +) +def test_batch_delete_rows(request_type, transport: str = "grpc"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.batch_delete_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchDeleteRowsRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_batch_delete_rows_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_rows), "__call__" + ) as call: + client.batch_delete_rows() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchDeleteRowsRequest() + + +@pytest.mark.asyncio +async def test_batch_delete_rows_async( + transport: str = "grpc_asyncio", request_type=tables.BatchDeleteRowsRequest +): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.batch_delete_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == tables.BatchDeleteRowsRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_batch_delete_rows_async_from_dict(): + await test_batch_delete_rows_async(request_type=dict) + + +def test_batch_delete_rows_field_headers(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.BatchDeleteRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_rows), "__call__" + ) as call: + call.return_value = None + client.batch_delete_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_delete_rows_field_headers_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tables.BatchDeleteRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.batch_delete_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + tables.GetTableRequest, + dict, + ], +) +def test_get_table_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Table( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_table(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tables.Table) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_table_rest_required_fields(request_type=tables.GetTableRequest): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
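+            # With transcode() mocked out this way, every request field is
+            # routed through query_params, which lets the test compare the
+            # parameters actually sent (see expected_params below).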
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_table_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_table_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_get_table" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_get_table" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.GetTableRequest.pb(tables.GetTableRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.Table.to_json(tables.Table()) + + request = tables.GetTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.Table() + + client.get_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_table_rest_bad_request( + transport: str = "rest", request_type=tables.GetTableRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_table(request) + + +def test_get_table_rest_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = tables.Table() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tables/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=tables/*}" % client.transport._host, args[1] + ) + + +def test_get_table_rest_flattened_error(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_table( + tables.GetTableRequest(), + name="name_value", + ) + + +def test_get_table_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.ListTablesRequest, + dict, + ], +) +def test_list_tables_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.ListTablesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tables(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTablesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tables_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_list_tables" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_list_tables" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.ListTablesRequest.pb(tables.ListTablesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.ListTablesResponse.to_json( + tables.ListTablesResponse() + ) + + request = tables.ListTablesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.ListTablesResponse() + + client.list_tables( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_tables_rest_bad_request( + transport: str = "rest", request_type=tables.ListTablesRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tables(request) + + +def test_list_tables_rest_pager(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
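+        # The pager test below feeds a fixed series of page responses through
+        # req.side_effect; iterating the pager should follow next_page_token
+        # until an empty token ends the sequence.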
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + tables.Table(), + ], + next_page_token="abc", + ), + tables.ListTablesResponse( + tables=[], + next_page_token="def", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + ], + next_page_token="ghi", + ), + tables.ListTablesResponse( + tables=[ + tables.Table(), + tables.Table(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(tables.ListTablesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_tables(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tables.Table) for i in results) + + pages = list(client.list_tables(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + tables.GetWorkspaceRequest, + dict, + ], +) +def test_get_workspace_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "workspaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Workspace( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Workspace.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_workspace(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tables.Workspace) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_workspace_rest_required_fields(request_type=tables.GetWorkspaceRequest): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workspace._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workspace._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.Workspace() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.Workspace.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workspace(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workspace_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workspace._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workspace_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_get_workspace" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_get_workspace" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.GetWorkspaceRequest.pb(tables.GetWorkspaceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.Workspace.to_json(tables.Workspace()) + + request = tables.GetWorkspaceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.Workspace() + + client.get_workspace( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workspace_rest_bad_request( + transport: str = "rest", request_type=tables.GetWorkspaceRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "workspaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workspace(request) + + +def test_get_workspace_rest_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
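+    # The flattened call should be transcoded onto the v1alpha1 HTTP rule;
+    # path_template.validate() below checks the URI that was actually built.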
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Workspace() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "workspaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Workspace.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workspace(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=workspaces/*}" % client.transport._host, args[1] + ) + + +def test_get_workspace_rest_flattened_error(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workspace( + tables.GetWorkspaceRequest(), + name="name_value", + ) + + +def test_get_workspace_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.ListWorkspacesRequest, + dict, + ], +) +def test_list_workspaces_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.ListWorkspacesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.ListWorkspacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workspaces(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkspacesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workspaces_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_list_workspaces" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_list_workspaces" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.ListWorkspacesRequest.pb(tables.ListWorkspacesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.ListWorkspacesResponse.to_json( + tables.ListWorkspacesResponse() + ) + + request = tables.ListWorkspacesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.ListWorkspacesResponse() + + client.list_workspaces( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workspaces_rest_bad_request( + transport: str = "rest", request_type=tables.ListWorkspacesRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workspaces(request) + + +def test_list_workspaces_rest_pager(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + tables.Workspace(), + ], + next_page_token="abc", + ), + tables.ListWorkspacesResponse( + workspaces=[], + next_page_token="def", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + ], + next_page_token="ghi", + ), + tables.ListWorkspacesResponse( + workspaces=[ + tables.Workspace(), + tables.Workspace(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(tables.ListWorkspacesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_workspaces(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tables.Workspace) for i in results) + + pages = list(client.list_workspaces(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + tables.GetRowRequest, + dict, + ], +) +def test_get_row_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tables/sample1/rows/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Row( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_row(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +def test_get_row_rest_required_fields(request_type=tables.GetRowRequest): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_row._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
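+    # ("view" is an optional query parameter on GetRowRequest, so it may
+    # legitimately remain among the unset fields for this GET method.)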
+ assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.Row() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_row_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_row._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_row_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_get_row" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_get_row" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.GetRowRequest.pb(tables.GetRowRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.Row.to_json(tables.Row()) + + request = tables.GetRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.Row() + + client.get_row( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_row_rest_bad_request( + transport: str = "rest", request_type=tables.GetRowRequest +): + client = TablesServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tables/sample1/rows/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_row(request) + + +def test_get_row_rest_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Row() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tables/sample1/rows/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=tables/*/rows/*}" % client.transport._host, args[1] + ) + + +def test_get_row_rest_flattened_error(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_row( + tables.GetRowRequest(), + name="name_value", + ) + + +def test_get_row_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.ListRowsRequest, + dict, + ], +) +def test_list_rows_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.ListRowsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.ListRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_rows(request) + + # Establish that the response is the type that we expect. 
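+    # list_rows wraps the raw ListRowsResponse in a pager, so the returned
+    # object is a pagers.ListRowsPager rather than the proto message itself.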
+ assert isinstance(response, pagers.ListRowsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_rows_rest_required_fields(request_type=tables.ListRowsRequest): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rows._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.ListRowsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
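+            # (list_rows is stubbed here as a GET with no body.)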
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.ListRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rows_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rows_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_list_rows" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_list_rows" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.ListRowsRequest.pb(tables.ListRowsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.ListRowsResponse.to_json( + tables.ListRowsResponse() + ) + + request = tables.ListRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.ListRowsResponse() + + client.list_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rows_rest_bad_request( + transport: str = "rest", request_type=tables.ListRowsRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_rows(request) + + +def test_list_rows_rest_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.ListRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "tables/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.ListRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_rows(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=tables/*}/rows" % client.transport._host, args[1] + ) + + +def test_list_rows_rest_flattened_error(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_rows( + tables.ListRowsRequest(), + parent="parent_value", + ) + + +def test_list_rows_rest_pager(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + tables.Row(), + ], + next_page_token="abc", + ), + tables.ListRowsResponse( + rows=[], + next_page_token="def", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + ], + next_page_token="ghi", + ), + tables.ListRowsResponse( + rows=[ + tables.Row(), + tables.Row(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(tables.ListRowsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "tables/sample1"} + + pager = client.list_rows(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tables.Row) for i in results) + + pages = list(client.list_rows(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + tables.CreateRowRequest, + dict, + ], +) +def test_create_row_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request_init["row"] = {"name": "name_value", "values": {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Row( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_row(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +def test_create_row_rest_required_fields(request_type=tables.CreateRowRequest): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_row._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
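+    # CreateRow carries the row in the request body, leaving "view" as the
+    # only query parameter that may remain unset.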
+ assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.Row() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_row_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_row._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("view",)) + & set( + ( + "parent", + "row", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_row_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_create_row" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_create_row" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.CreateRowRequest.pb(tables.CreateRowRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.Row.to_json(tables.Row()) + + request = tables.CreateRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.Row() + + client.create_row( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_row_rest_bad_request( + transport: str = "rest", 
request_type=tables.CreateRowRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request_init["row"] = {"name": "name_value", "values": {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_row(request) + + +def test_create_row_rest_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Row() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "tables/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + row=tables.Row(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=tables/*}/rows" % client.transport._host, args[1] + ) + + +def test_create_row_rest_flattened_error(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_row( + tables.CreateRowRequest(), + parent="parent_value", + row=tables.Row(name="name_value"), + ) + + +def test_create_row_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.BatchCreateRowsRequest, + dict, + ], +) +def test_batch_create_rows_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = tables.BatchCreateRowsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.BatchCreateRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_rows(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.BatchCreateRowsResponse) + + +def test_batch_create_rows_rest_required_fields( + request_type=tables.BatchCreateRowsRequest, +): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.BatchCreateRowsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
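+            # (batch_create_rows is stubbed as a POST, so a request body is
+            # supplied as well.)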
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.BatchCreateRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_rows_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_rows_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_batch_create_rows" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_batch_create_rows" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.BatchCreateRowsRequest.pb(tables.BatchCreateRowsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.BatchCreateRowsResponse.to_json( + tables.BatchCreateRowsResponse() + ) + + request = tables.BatchCreateRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.BatchCreateRowsResponse() + + client.batch_create_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_rows_rest_bad_request( + transport: str = "rest", request_type=tables.BatchCreateRowsRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
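+    # A 400 from the mocked session must surface as core_exceptions.BadRequest.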
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_rows(request) + + +def test_batch_create_rows_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.UpdateRowRequest, + dict, + ], +) +def test_update_row_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"row": {"name": "tables/sample1/rows/sample2"}} + request_init["row"] = {"name": "tables/sample1/rows/sample2", "values": {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Row( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_row(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, tables.Row) + assert response.name == "name_value" + + +def test_update_row_rest_required_fields(request_type=tables.UpdateRowRequest): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_row._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "update_mask", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.Row() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
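+        # (update_row is stubbed below as a PATCH with a request body.)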
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_row_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_row._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "view", + ) + ) + & set(("row",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_row_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_update_row" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_update_row" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.UpdateRowRequest.pb(tables.UpdateRowRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.Row.to_json(tables.Row()) + + request = tables.UpdateRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.Row() + + client.update_row( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_row_rest_bad_request( + transport: str = "rest", request_type=tables.UpdateRowRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"row": {"name": "tables/sample1/rows/sample2"}} + request_init["row"] = {"name": "tables/sample1/rows/sample2", "values": {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_row(request) + + +def test_update_row_rest_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.Row() + + # get arguments that satisfy an http rule for this method + sample_request = {"row": {"name": "tables/sample1/rows/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + row=tables.Row(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.Row.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{row.name=tables/*/rows/*}" % client.transport._host, args[1] + ) + + +def test_update_row_rest_flattened_error(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_row( + tables.UpdateRowRequest(), + row=tables.Row(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_row_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.BatchUpdateRowsRequest, + dict, + ], +) +def test_batch_update_rows_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tables.BatchUpdateRowsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = tables.BatchUpdateRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_update_rows(request) + + # Establish that the response is the type that we expect. 
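+    # Batch updates return the plain response message; there is no pager.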
+ assert isinstance(response, tables.BatchUpdateRowsResponse) + + +def test_batch_update_rows_rest_required_fields( + request_type=tables.BatchUpdateRowsRequest, +): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tables.BatchUpdateRowsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = tables.BatchUpdateRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_update_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_update_rows_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_update_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_update_rows_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_batch_update_rows" + ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_batch_update_rows" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = tables.BatchUpdateRowsRequest.pb(tables.BatchUpdateRowsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tables.BatchUpdateRowsResponse.to_json( + tables.BatchUpdateRowsResponse() + ) + + request = tables.BatchUpdateRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tables.BatchUpdateRowsResponse() + + client.batch_update_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_update_rows_rest_bad_request( + transport: str = "rest", request_type=tables.BatchUpdateRowsRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_update_rows(request) + + +def test_batch_update_rows_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.DeleteRowRequest, + dict, + ], +) +def test_delete_row_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tables/sample1/rows/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_row(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_row_rest_required_fields(request_type=tables.DeleteRowRequest): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
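+            # (delete_row is stubbed as a DELETE that returns an empty body.)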
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_row_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_row._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_row_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_delete_row" + ) as pre: + pre.assert_not_called() + pb_message = tables.DeleteRowRequest.pb(tables.DeleteRowRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = tables.DeleteRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_row( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_row_rest_bad_request( + transport: str = "rest", request_type=tables.DeleteRowRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tables/sample1/rows/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_row(request) + + +def test_delete_row_rest_flattened(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
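+        # delete_row has no response payload, so the flattened call is
+        # verified purely through the URI it produces.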
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tables/sample1/rows/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=tables/*/rows/*}" % client.transport._host, args[1] + ) + + +def test_delete_row_rest_flattened_error(transport: str = "rest"): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_row( + tables.DeleteRowRequest(), + name="name_value", + ) + + +def test_delete_row_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tables.BatchDeleteRowsRequest, + dict, + ], +) +def test_batch_delete_rows_rest(request_type): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_delete_rows(request) + + # Establish that the response is the type that we expect. 
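+    # batch_delete_rows, like delete_row, returns None on success.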
+ assert response is None + + +def test_batch_delete_rows_rest_required_fields( + request_type=tables.BatchDeleteRowsRequest, +): + transport_class = transports.TablesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["names"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["names"] = "names_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "names" in jsonified_request + assert jsonified_request["names"] == "names_value" + + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
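+            # (batch_delete_rows is stubbed as a POST with a body but, like
+            # delete_row, an empty response.)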
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_delete_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_delete_rows_rest_unset_required_fields(): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_delete_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "names", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_delete_rows_rest_interceptors(null_interceptor): + transport = transports.TablesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TablesServiceRestInterceptor(), + ) + client = TablesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TablesServiceRestInterceptor, "pre_batch_delete_rows" + ) as pre: + pre.assert_not_called() + pb_message = tables.BatchDeleteRowsRequest.pb(tables.BatchDeleteRowsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = tables.BatchDeleteRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.batch_delete_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_batch_delete_rows_rest_bad_request( + transport: str = "rest", request_type=tables.BatchDeleteRowsRequest +): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tables/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_delete_rows(request) + + +def test_batch_delete_rows_rest_error(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
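+    # (A ready-made transport already carries its own credentials, so the
+    # client rejects any credential-style option passed alongside it.)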
+ transport = transports.TablesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TablesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TablesServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TablesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TablesServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TablesServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TablesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TablesServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TablesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TablesServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TablesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TablesServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TablesServiceGrpcTransport, + transports.TablesServiceGrpcAsyncIOTransport, + transports.TablesServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TablesServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
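# Note: with no transport argument and no endpoint override, the client is
# expected to fall back to gRPC, which the isinstance check below pins down.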
+ client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TablesServiceGrpcTransport, + ) + + +def test_tables_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TablesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_tables_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.area120.tables_v1alpha1.services.tables_service.transports.TablesServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TablesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_table", + "list_tables", + "get_workspace", + "list_workspaces", + "get_row", + "list_rows", + "create_row", + "batch_create_rows", + "update_row", + "batch_update_rows", + "delete_row", + "batch_delete_rows", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_tables_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.area120.tables_v1alpha1.services.tables_service.transports.TablesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TablesServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.readonly", + "https://www.googleapis.com/auth/spreadsheets", + "https://www.googleapis.com/auth/spreadsheets.readonly", + "https://www.googleapis.com/auth/tables", + ), + quota_project_id="octopus", + ) + + +def test_tables_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.area120.tables_v1alpha1.services.tables_service.transports.TablesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TablesServiceTransport() + adc.assert_called_once() + + +def test_tables_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
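# Note: "ADC" here is Application Default Credentials. google.auth.default()
# is patched below, so no real credential lookup happens; the assertion
# verifies that the transport's declared Drive/Sheets/Tables default scopes
# are forwarded to that lookup.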
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TablesServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.readonly", + "https://www.googleapis.com/auth/spreadsheets", + "https://www.googleapis.com/auth/spreadsheets.readonly", + "https://www.googleapis.com/auth/tables", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TablesServiceGrpcTransport, + transports.TablesServiceGrpcAsyncIOTransport, + ], +) +def test_tables_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.readonly", + "https://www.googleapis.com/auth/spreadsheets", + "https://www.googleapis.com/auth/spreadsheets.readonly", + "https://www.googleapis.com/auth/tables", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TablesServiceGrpcTransport, + transports.TablesServiceGrpcAsyncIOTransport, + transports.TablesServiceRestTransport, + ], +) +def test_tables_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TablesServiceGrpcTransport, grpc_helpers), + (transports.TablesServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_tables_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
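# Note: the create_channel assertion below pins down the full channel
# contract for this service: the default host area120tables.googleapis.com
# on port 443, the six Drive/Sheets/Tables default scopes, and unbounded
# gRPC send/receive message sizes (-1 disables the limits).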
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "area120tables.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.readonly", + "https://www.googleapis.com/auth/spreadsheets", + "https://www.googleapis.com/auth/spreadsheets.readonly", + "https://www.googleapis.com/auth/tables", + ), + scopes=["1", "2"], + default_host="area120tables.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TablesServiceGrpcTransport, + transports.TablesServiceGrpcAsyncIOTransport, + ], +) +def test_tables_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_tables_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TablesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_tables_service_host_no_port(transport_name): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="area120tables.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "area120tables.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://area120tables.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_tables_service_host_with_port(transport_name): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="area120tables.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "area120tables.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://area120tables.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_tables_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TablesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TablesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_table._session + session2 = client2.transport.get_table._session + assert session1 != session2 + session1 = client1.transport.list_tables._session + session2 = client2.transport.list_tables._session + assert session1 != session2 + session1 = client1.transport.get_workspace._session + session2 = client2.transport.get_workspace._session + assert session1 != session2 + session1 = client1.transport.list_workspaces._session + session2 = client2.transport.list_workspaces._session + assert session1 != session2 + session1 = client1.transport.get_row._session + session2 = client2.transport.get_row._session + assert session1 != session2 + session1 = client1.transport.list_rows._session + session2 = client2.transport.list_rows._session + assert session1 != session2 + session1 = client1.transport.create_row._session + session2 = client2.transport.create_row._session + assert session1 != session2 + session1 = client1.transport.batch_create_rows._session + session2 = client2.transport.batch_create_rows._session + assert session1 != session2 + session1 = client1.transport.update_row._session + session2 = 
client2.transport.update_row._session + assert session1 != session2 + session1 = client1.transport.batch_update_rows._session + session2 = client2.transport.batch_update_rows._session + assert session1 != session2 + session1 = client1.transport.delete_row._session + session2 = client2.transport.delete_row._session + assert session1 != session2 + session1 = client1.transport.batch_delete_rows._session + session2 = client2.transport.batch_delete_rows._session + assert session1 != session2 + + +def test_tables_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TablesServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_tables_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TablesServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.TablesServiceGrpcTransport, + transports.TablesServiceGrpcAsyncIOTransport, + ], +) +def test_tables_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.TablesServiceGrpcTransport, + transports.TablesServiceGrpcAsyncIOTransport, + ], +) +def test_tables_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_row_path(): + table = "squid" + row = "clam" + expected = "tables/{table}/rows/{row}".format( + table=table, + row=row, + ) + actual = TablesServiceClient.row_path(table, row) + assert expected == actual + + +def test_parse_row_path(): + expected = { + "table": "whelk", + "row": "octopus", + } + path = TablesServiceClient.row_path(**expected) + + # Check that the path construction is reversible. + actual = TablesServiceClient.parse_row_path(path) + assert expected == actual + + +def test_table_path(): + table = "oyster" + expected = "tables/{table}".format( + table=table, + ) + actual = TablesServiceClient.table_path(table) + assert expected == actual + + +def test_parse_table_path(): + expected = { + "table": "nudibranch", + } + path = TablesServiceClient.table_path(**expected) + + # Check that the path construction is reversible. + actual = TablesServiceClient.parse_table_path(path) + assert expected == actual + + +def test_workspace_path(): + workspace = "cuttlefish" + expected = "workspaces/{workspace}".format( + workspace=workspace, + ) + actual = TablesServiceClient.workspace_path(workspace) + assert expected == actual + + +def test_parse_workspace_path(): + expected = { + "workspace": "mussel", + } + path = TablesServiceClient.workspace_path(**expected) + + # Check that the path construction is reversible. + actual = TablesServiceClient.parse_workspace_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TablesServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = TablesServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TablesServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TablesServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = TablesServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TablesServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TablesServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = TablesServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TablesServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = TablesServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = TablesServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TablesServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TablesServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = TablesServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TablesServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TablesServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TablesServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TablesServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TablesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TablesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TablesServiceClient, transports.TablesServiceGrpcTransport), + (TablesServiceAsyncClient, transports.TablesServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-advisorynotifications/.OwlBot.yaml b/packages/google-cloud-advisorynotifications/.OwlBot.yaml new file mode 100644 index 000000000000..383bd5fa9b09 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/advisorynotifications/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-advisorynotifications/$1 diff --git a/packages/google-cloud-advisorynotifications/.coveragerc b/packages/google-cloud-advisorynotifications/.coveragerc new file mode 100644 index 000000000000..1a0cc65b3664 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/advisorynotifications/__init__.py + google/cloud/advisorynotifications/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-advisorynotifications/.flake8 b/packages/google-cloud-advisorynotifications/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-advisorynotifications/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-advisorynotifications/.gitignore b/packages/google-cloud-advisorynotifications/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-advisorynotifications/.repo-metadata.json b/packages/google-cloud-advisorynotifications/.repo-metadata.json new file mode 100644 index 000000000000..6dd90b2d5e2a --- /dev/null +++ b/packages/google-cloud-advisorynotifications/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "advisorynotifications", + "name_pretty": "Advisory Notifications", + "api_description": "Advisory Notifications provides well-targeted, timely, and compliant communications about critical security and privacy events in the Google Cloud console and allows you to securely investigate the event, take action, and get support.", + "product_documentation": "https://cloud.google.com/advisory-notifications/", + "client_documentation": "https://cloud.google.com/python/docs/reference/advisorynotifications/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-advisorynotifications", + "api_id": "advisorynotifications.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "advisorynotifications" +} diff --git a/packages/google-cloud-advisorynotifications/CHANGELOG.md b/packages/google-cloud-advisorynotifications/CHANGELOG.md new file mode 100644 index 000000000000..a637ab54d0d8 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/CHANGELOG.md @@ -0,0 +1,24 @@ +# Changelog + +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.1.1...google-cloud-advisorynotifications-v0.2.0) (2023-03-30) + + +### Features + +* Add NotificationType field for advisorynotifications.googleapis.com ([#10877](https://github.com/googleapis/google-cloud-python/issues/10877)) ([96b9091](https://github.com/googleapis/google-cloud-python/commit/96b9091776a7355c9fc52f6d3c85475ecbaec38f)) + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.1.0...google-cloud-advisorynotifications-v0.1.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## 0.1.0 (2023-02-14) + + +### Features + +* add initial files for google.cloud.advisorynotifications.v1 ([#10841](https://github.com/googleapis/google-cloud-python/issues/10841)) ([2d290ee](https://github.com/googleapis/google-cloud-python/commit/2d290eed6b6c3e0c5ac447289c697408ffdbdebe)) + +## Changelog diff --git a/packages/google-cloud-advisorynotifications/CODE_OF_CONDUCT.md b/packages/google-cloud-advisorynotifications/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-advisorynotifications/CONTRIBUTING.rst b/packages/google-cloud-advisorynotifications/CONTRIBUTING.rst new file mode 100644 index 000000000000..fa701a8b53de --- /dev/null +++ b/packages/google-cloud-advisorynotifications/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. 
_repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system- -- -k + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. 
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-advisorynotifications + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py + + +We also explicitly decided to support Python 3 beginning with version 3.7. +Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
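The ``nox`` sessions named throughout this guide (``unit``, ``system``, ``lint``, ``blacken``, ``cover``, ``docs``) are plain Python functions. As a rough illustration only, since the real definitions live in each package's ``noxfile.py`` and the Python versions and test dependencies shown here are assumptions rather than the actual configuration, a ``unit`` session has approximately this shape::

    # Hypothetical sketch of a unit-test session; not the actual noxfile.py.
    import nox

    @nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
    def unit(session):
        # `nox -s unit` runs every listed interpreter, while a suffixed form
        # such as `nox -s unit-3.11` selects one, matching the invocations
        # shown earlier in this guide.
        session.install("pytest", "pytest-cov", "pytest-asyncio")
        session.install("-e", ".")
        session.run("pytest", "--cov=google", "tests/unit")

Defining ``system``, ``lint`` and the other sessions as further ``@nox.session`` functions is what makes the corresponding ``nox -s`` commands above resolvable.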
diff --git a/packages/google-cloud-advisorynotifications/LICENSE b/packages/google-cloud-advisorynotifications/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-advisorynotifications/MANIFEST.in b/packages/google-cloud-advisorynotifications/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-advisorynotifications/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-advisorynotifications/README.rst b/packages/google-cloud-advisorynotifications/README.rst new file mode 100644 index 000000000000..491f3e723464 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/README.rst @@ -0,0 +1,107 @@ +Python Client for Advisory Notifications +======================================== + +|preview| |pypi| |versions| + +`Advisory Notifications`_: Advisory Notifications provides well-targeted, timely, and compliant communications about critical security and privacy events in the Google Cloud console and allows you to securely investigate the event, take action, and get support. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-advisorynotifications.svg + :target: https://pypi.org/project/google-cloud-advisorynotifications/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-advisorynotifications.svg + :target: https://pypi.org/project/google-cloud-advisorynotifications/ +.. _Advisory Notifications: https://cloud.google.com/advisory-notifications/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/advisorynotifications/latest +.. _Product Documentation: https://cloud.google.com/advisory-notifications/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Advisory Notifications.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Enable the Advisory Notifications.: https://cloud.google.com/advisory-notifications/ +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-cloud-advisorynotifications + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-cloud-advisorynotifications + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Advisory Notifications + to see other available methods on the client. +- Read the `Advisory Notifications Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Advisory Notifications Product documentation: https://cloud.google.com/advisory-notifications/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-advisorynotifications/SECURITY.md b/packages/google-cloud-advisorynotifications/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-advisorynotifications/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
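The README above stops at installation. As a minimal sketch of the first call against the new client, the snippet below lists notifications for an organization; it mirrors the generated samples that appear later in this diff. The organization ID and the ``global`` location are placeholders, and Application Default Credentials are assumed.

.. code-block:: python

    # Minimal usage sketch (assumes Application Default Credentials and a
    # placeholder organization ID).
    from google.cloud import advisorynotifications_v1

    client = advisorynotifications_v1.AdvisoryNotificationsServiceClient()

    request = advisorynotifications_v1.ListNotificationsRequest(
        parent="organizations/123/locations/global",
    )

    # list_notifications returns a pager; iterating over it resolves
    # additional pages automatically.
    for notification in client.list_notifications(request=request):
        print(notification.name)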
diff --git a/packages/google-cloud-advisorynotifications/docs/CHANGELOG.md b/packages/google-cloud-advisorynotifications/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-advisorynotifications/docs/README.rst b/packages/google-cloud-advisorynotifications/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-advisorynotifications/docs/_static/custom.css b/packages/google-cloud-advisorynotifications/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-advisorynotifications/docs/_templates/layout.html b/packages/google-cloud-advisorynotifications/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
<div class="document"> +    {{ sidebar() }} +    {%- block document %} +      <div class="documentwrapper"> +      {%- if render_sidebar %} +        <div class="bodywrapper"> +      {%- endif %} + +          {%- block relbar_top %} +            {%- if theme_show_relbar_top|tobool %} +              <div class="related top"> +                &nbsp; +                {{- rellink_markup () }} +              </div> +            {%- endif %} +          {% endblock %} + +          <div class="body" role="main"> +            <div class="admonition" id="python2-eol"> +             As of January 1, 2020 this library no longer supports Python 2 on the latest released version. +             Library versions released prior to that date will continue to be available. For more information please +             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. +            </div> +            {% block body %} {% endblock %} +          </div> + +          {%- block relbar_bottom %} +            {%- if theme_show_relbar_bottom|tobool %} +              <div class="related bottom"> +                &nbsp; +                {{- rellink_markup () }} +              </div> +            {%- endif %} +          {% endblock %} + +      {%- if render_sidebar %} +        </div> +      {%- endif %} +      </div> +    {%- endblock %} +    <div class="clearer"></div> +  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/advisory_notifications_service.rst b/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/advisory_notifications_service.rst new file mode 100644 index 000000000000..f6cea06fc761 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/advisory_notifications_service.rst @@ -0,0 +1,10 @@ +AdvisoryNotificationsService +---------------------------------------------- + +.. automodule:: google.cloud.advisorynotifications_v1.services.advisory_notifications_service + :members: + :inherited-members: + +.. automodule:: google.cloud.advisorynotifications_v1.services.advisory_notifications_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/services.rst b/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/services.rst new file mode 100644 index 000000000000..08b43dcf14e4 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Advisorynotifications v1 API +====================================================== +.. toctree:: + :maxdepth: 2 + + advisory_notifications_service diff --git a/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/types.rst b/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/types.rst new file mode 100644 index 000000000000..800466cfcb2e --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/advisorynotifications_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Advisorynotifications v1 API +=================================================== + +.. automodule:: google.cloud.advisorynotifications_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-advisorynotifications/docs/conf.py b/packages/google-cloud-advisorynotifications/docs/conf.py new file mode 100644 index 000000000000..bcb79914c994 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-advisorynotifications documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-advisorynotifications" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-advisorynotifications", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-advisorynotifications-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-advisorynotifications.tex", + "google-cloud-advisorynotifications Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-advisorynotifications", + "google-cloud-advisorynotifications Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-advisorynotifications", + "google-cloud-advisorynotifications Documentation", + author, + "google-cloud-advisorynotifications", + "google-cloud-advisorynotifications Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-advisorynotifications/docs/index.rst b/packages/google-cloud-advisorynotifications/docs/index.rst new file mode 100644 index 000000000000..8eee0f476f02 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + advisorynotifications_v1/services + advisorynotifications_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-advisorynotifications`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-advisorynotifications/docs/multiprocessing.rst b/packages/google-cloud-advisorynotifications/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/__init__.py new file mode 100644 index 000000000000..fd0c8088b23b --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
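The ``multiprocessing.rst`` note in the diff above is easy to get wrong in practice. As a minimal sketch of the recommended pattern, each worker below constructs its own client after the fork, so no gRPC channel state is shared across processes; the organization names are placeholders.

.. code-block:: python

    # Illustration of the multiprocessing guidance above: create the
    # client *inside* the worker (post-fork), never in the parent.
    import multiprocessing

    from google.cloud import advisorynotifications_v1

    def count_notifications(parent: str) -> int:
        # A fresh client per process; gRPC channels must not cross forks.
        client = advisorynotifications_v1.AdvisoryNotificationsServiceClient()
        return sum(1 for _ in client.list_notifications(parent=parent))

    if __name__ == "__main__":
        parents = [
            "organizations/123/locations/global",
            "organizations/456/locations/global",
        ]
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(count_notifications, parents))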
+# +from google.cloud.advisorynotifications import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.advisorynotifications_v1.services.advisory_notifications_service.async_client import ( + AdvisoryNotificationsServiceAsyncClient, +) +from google.cloud.advisorynotifications_v1.services.advisory_notifications_service.client import ( + AdvisoryNotificationsServiceClient, +) +from google.cloud.advisorynotifications_v1.types.service import ( + Attachment, + Csv, + GetNotificationRequest, + ListNotificationsRequest, + ListNotificationsResponse, + LocalizationState, + Message, + Notification, + NotificationType, + NotificationView, + Subject, + Text, +) + +__all__ = ( + "AdvisoryNotificationsServiceClient", + "AdvisoryNotificationsServiceAsyncClient", + "Attachment", + "Csv", + "GetNotificationRequest", + "ListNotificationsRequest", + "ListNotificationsResponse", + "Message", + "Notification", + "Subject", + "Text", + "LocalizationState", + "NotificationType", + "NotificationView", +) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py new file mode 100644 index 000000000000..e94c45aea923 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/py.typed b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/py.typed new file mode 100644 index 000000000000..2a5c03c06eb4 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-advisorynotifications package uses inline types. diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/__init__.py new file mode 100644 index 000000000000..4478a6ca34ce --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/__init__.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.advisorynotifications_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.advisory_notifications_service import ( + AdvisoryNotificationsServiceAsyncClient, + AdvisoryNotificationsServiceClient, +) +from .types.service import ( + Attachment, + Csv, + GetNotificationRequest, + ListNotificationsRequest, + ListNotificationsResponse, + LocalizationState, + Message, + Notification, + NotificationType, + NotificationView, + Subject, + Text, +) + +__all__ = ( + "AdvisoryNotificationsServiceAsyncClient", + "AdvisoryNotificationsServiceClient", + "Attachment", + "Csv", + "GetNotificationRequest", + "ListNotificationsRequest", + "ListNotificationsResponse", + "LocalizationState", + "Message", + "Notification", + "NotificationType", + "NotificationView", + "Subject", + "Text", +) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_metadata.json b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_metadata.json new file mode 100644 index 000000000000..5c1c2efdc5bb --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_metadata.json @@ -0,0 +1,58 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.advisorynotifications_v1", + "protoPackage": "google.cloud.advisorynotifications.v1", + "schema": "1.0", + "services": { + "AdvisoryNotificationsService": { + "clients": { + "grpc": { + "libraryClient": "AdvisoryNotificationsServiceClient", + "rpcs": { + "GetNotification": { + "methods": [ + "get_notification" + ] + }, + "ListNotifications": { + "methods": [ + "list_notifications" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AdvisoryNotificationsServiceAsyncClient", + "rpcs": { + "GetNotification": { + "methods": [ + "get_notification" + ] + }, + "ListNotifications": { + "methods": [ + "list_notifications" + ] + } + } + }, + "rest": { + "libraryClient": "AdvisoryNotificationsServiceClient", + "rpcs": { + "GetNotification": { + "methods": [ + "get_notification" + ] + }, + "ListNotifications": { + "methods": [ + "list_notifications" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py new file mode 100644 index 000000000000..e94c45aea923 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/py.typed b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/py.typed new file mode 100644 index 000000000000..2a5c03c06eb4 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-advisorynotifications package uses inline types. diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/__init__.py new file mode 100644 index 000000000000..dc8b676863df --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AdvisoryNotificationsServiceAsyncClient +from .client import AdvisoryNotificationsServiceClient + +__all__ = ( + "AdvisoryNotificationsServiceClient", + "AdvisoryNotificationsServiceAsyncClient", +) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py new file mode 100644 index 000000000000..b181c6896983 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py @@ -0,0 +1,482 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.advisorynotifications_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.advisorynotifications_v1.services.advisory_notifications_service import ( + pagers, +) +from google.cloud.advisorynotifications_v1.types import service + +from .client import AdvisoryNotificationsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AdvisoryNotificationsServiceTransport +from .transports.grpc_asyncio import AdvisoryNotificationsServiceGrpcAsyncIOTransport + + +class AdvisoryNotificationsServiceAsyncClient: + """Service to manage Security and Privacy Notifications.""" + + _client: AdvisoryNotificationsServiceClient + + DEFAULT_ENDPOINT = AdvisoryNotificationsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AdvisoryNotificationsServiceClient.DEFAULT_MTLS_ENDPOINT + + notification_path = staticmethod( + AdvisoryNotificationsServiceClient.notification_path + ) + parse_notification_path = staticmethod( + AdvisoryNotificationsServiceClient.parse_notification_path + ) + common_billing_account_path = staticmethod( + AdvisoryNotificationsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AdvisoryNotificationsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + AdvisoryNotificationsServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + 
AdvisoryNotificationsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AdvisoryNotificationsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AdvisoryNotificationsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + AdvisoryNotificationsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + AdvisoryNotificationsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + AdvisoryNotificationsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + AdvisoryNotificationsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AdvisoryNotificationsServiceAsyncClient: The constructed client. + """ + return AdvisoryNotificationsServiceClient.from_service_account_info.__func__(AdvisoryNotificationsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AdvisoryNotificationsServiceAsyncClient: The constructed client. + """ + return AdvisoryNotificationsServiceClient.from_service_account_file.__func__(AdvisoryNotificationsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + return AdvisoryNotificationsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AdvisoryNotificationsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AdvisoryNotificationsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(AdvisoryNotificationsServiceClient).get_transport_class, + type(AdvisoryNotificationsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AdvisoryNotificationsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the advisory notifications service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AdvisoryNotificationsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AdvisoryNotificationsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_notifications( + self, + request: Optional[Union[service.ListNotificationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotificationsAsyncPager: + r"""Lists notifications under a given parent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import advisorynotifications_v1 + + async def sample_list_notifications(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.ListNotificationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_notifications(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.advisorynotifications_v1.types.ListNotificationsRequest, dict]]): + The request object. Request for fetching all + notifications for a given parent. + parent (:class:`str`): + Required. The parent, which owns this + collection of notifications. Must be of + the form + "organizations/{organization}/locations/{location}". + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.advisorynotifications_v1.services.advisory_notifications_service.pagers.ListNotificationsAsyncPager: + Response of ListNotifications + endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListNotificationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_notifications, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNotificationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response.
+ return response + + async def get_notification( + self, + request: Optional[Union[service.GetNotificationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Notification: + r"""Gets a notification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import advisorynotifications_v1 + + async def sample_get_notification(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.GetNotificationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_notification(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.advisorynotifications_v1.types.GetNotificationRequest, dict]]): + The request object. Request for fetching a notification. + name (:class:`str`): + Required. A name of the notification + to retrieve. Format: + organizations/{organization}/locations/{location}/notifications/{notification}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.advisorynotifications_v1.types.Notification: + A notification object for notifying + customers about security and privacy + issues. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetNotificationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_notification, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AdvisoryNotificationsServiceAsyncClient",) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py new file mode 100644 index 000000000000..6653aecf5589 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py @@ -0,0 +1,693 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.advisorynotifications_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.advisorynotifications_v1.services.advisory_notifications_service import ( + pagers, +) +from google.cloud.advisorynotifications_v1.types import service + +from .transports.base import DEFAULT_CLIENT_INFO, AdvisoryNotificationsServiceTransport +from .transports.grpc import AdvisoryNotificationsServiceGrpcTransport +from .transports.grpc_asyncio import AdvisoryNotificationsServiceGrpcAsyncIOTransport +from .transports.rest import AdvisoryNotificationsServiceRestTransport + + +class AdvisoryNotificationsServiceClientMeta(type): + """Metaclass for the AdvisoryNotificationsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AdvisoryNotificationsServiceTransport]] + _transport_registry["grpc"] = AdvisoryNotificationsServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = AdvisoryNotificationsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AdvisoryNotificationsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AdvisoryNotificationsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AdvisoryNotificationsServiceClient( + metaclass=AdvisoryNotificationsServiceClientMeta +): + """Service to manage Security and Privacy Notifications.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "advisorynotifications.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AdvisoryNotificationsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AdvisoryNotificationsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AdvisoryNotificationsServiceTransport: + """Returns the transport used by the client instance. 
+
+ Returns:
+ AdvisoryNotificationsServiceTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def notification_path(
+ organization: str,
+ location: str,
+ notification: str,
+ ) -> str:
+ """Returns a fully-qualified notification string."""
+ return "organizations/{organization}/locations/{location}/notifications/{notification}".format(
+ organization=organization,
+ location=location,
+ notification=notification,
+ )
+
+ @staticmethod
+ def parse_notification_path(path: str) -> Dict[str, str]:
+ """Parses a notification path into its component segments."""
+ m = re.match(
+ r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/notifications/(?P<notification>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[client_options_lib.ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Figure out the client cert source to use.
+ client_cert_source = None
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ client_cert_source = client_options.client_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = cls.DEFAULT_ENDPOINT
+
+ return api_endpoint, client_cert_source
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Optional[Union[str, AdvisoryNotificationsServiceTransport]] = None,
+ client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the advisory notifications service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, AdvisoryNotificationsServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ client_options = cast(client_options_lib.ClientOptions, client_options)
+
+ api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
+ client_options
+ )
+
+ api_key_value = getattr(client_options, "api_key", None)
+ if api_key_value and credentials:
+ raise ValueError(
+ "client_options.api_key and credentials are mutually exclusive"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, AdvisoryNotificationsServiceTransport):
+ # transport is an AdvisoryNotificationsServiceTransport instance.
+ if credentials or client_options.credentials_file or api_key_value:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ import google.auth._default # type: ignore
+
+ if api_key_value and hasattr(
+ google.auth._default, "get_api_key_credentials"
+ ):
+ credentials = google.auth._default.get_api_key_credentials(
+ api_key_value
+ )
+
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ api_audience=client_options.api_audience,
+ )
+
+ def list_notifications(
+ self,
+ request: Optional[Union[service.ListNotificationsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListNotificationsPager:
+ r"""Lists notifications under a given parent.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import advisorynotifications_v1 + + def sample_list_notifications(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.ListNotificationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_notifications(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.advisorynotifications_v1.types.ListNotificationsRequest, dict]): + The request object. Request for fetching all + notifications for a given parent. + parent (str): + Required. The parent, which owns this + collection of notifications. Must be of + the form + "organizations/{organization}/locations/{location}". + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.advisorynotifications_v1.services.advisory_notifications_service.pagers.ListNotificationsPager: + Response of ListNotifications + endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListNotificationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListNotificationsRequest): + request = service.ListNotificationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_notifications] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNotificationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
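+ # Usage sketch (the parent value is hypothetical, not part of the
+ # generated file):
+ # pager = client.list_notifications(parent="organizations/123/locations/global")
+ # for notification in pager: # further RPCs are issued lazily per page
+ # print(notification.name)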
+ return response + + def get_notification( + self, + request: Optional[Union[service.GetNotificationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Notification: + r"""Gets a notification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import advisorynotifications_v1 + + def sample_get_notification(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.GetNotificationRequest( + name="name_value", + ) + + # Make the request + response = client.get_notification(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.advisorynotifications_v1.types.GetNotificationRequest, dict]): + The request object. Request for fetching a notification. + name (str): + Required. A name of the notification + to retrieve. Format: + organizations/{organization}/locations/{location}/notifications/{notification}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.advisorynotifications_v1.types.Notification: + A notification object for notifying + customers about security and privacy + issues. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetNotificationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetNotificationRequest): + request = service.GetNotificationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_notification] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
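+ # Usage sketch (the resource name is hypothetical, not part of the
+ # generated file):
+ # notification = client.get_notification(
+ # name="organizations/123/locations/global/notifications/n1"
+ # )
+ # print(notification.name)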
+ return response + + def __enter__(self) -> "AdvisoryNotificationsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AdvisoryNotificationsServiceClient",) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/pagers.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/pagers.py new file mode 100644 index 000000000000..ea030916aecf --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.advisorynotifications_v1.types import service + + +class ListNotificationsPager: + """A pager for iterating through ``list_notifications`` requests. + + This class thinly wraps an initial + :class:`google.cloud.advisorynotifications_v1.types.ListNotificationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``notifications`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNotifications`` requests and continue to iterate + through the ``notifications`` field on the + corresponding responses. + + All the usual :class:`google.cloud.advisorynotifications_v1.types.ListNotificationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListNotificationsResponse], + request: service.ListNotificationsRequest, + response: service.ListNotificationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.advisorynotifications_v1.types.ListNotificationsRequest): + The initial request object. + response (google.cloud.advisorynotifications_v1.types.ListNotificationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListNotificationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListNotificationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[service.Notification]: + for page in self.pages: + yield from page.notifications + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNotificationsAsyncPager: + """A pager for iterating through ``list_notifications`` requests. + + This class thinly wraps an initial + :class:`google.cloud.advisorynotifications_v1.types.ListNotificationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``notifications`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListNotifications`` requests and continue to iterate + through the ``notifications`` field on the + corresponding responses. + + All the usual :class:`google.cloud.advisorynotifications_v1.types.ListNotificationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListNotificationsResponse]], + request: service.ListNotificationsRequest, + response: service.ListNotificationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.advisorynotifications_v1.types.ListNotificationsRequest): + The initial request object. + response (google.cloud.advisorynotifications_v1.types.ListNotificationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListNotificationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListNotificationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[service.Notification]: + async def async_generator(): + async for page in self.pages: + for response in page.notifications: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/__init__.py new file mode 100644 index 000000000000..81e078997a7f --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AdvisoryNotificationsServiceTransport +from .grpc import AdvisoryNotificationsServiceGrpcTransport +from .grpc_asyncio import AdvisoryNotificationsServiceGrpcAsyncIOTransport +from .rest import ( + AdvisoryNotificationsServiceRestInterceptor, + AdvisoryNotificationsServiceRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[AdvisoryNotificationsServiceTransport]] +_transport_registry["grpc"] = AdvisoryNotificationsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AdvisoryNotificationsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AdvisoryNotificationsServiceRestTransport + +__all__ = ( + "AdvisoryNotificationsServiceTransport", + "AdvisoryNotificationsServiceGrpcTransport", + "AdvisoryNotificationsServiceGrpcAsyncIOTransport", + "AdvisoryNotificationsServiceRestTransport", + "AdvisoryNotificationsServiceRestInterceptor", +) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/base.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/base.py new file mode 100644 index 000000000000..6f15ed7afa2b --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/base.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.advisorynotifications_v1 import gapic_version as package_version +from google.cloud.advisorynotifications_v1.types import service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AdvisoryNotificationsServiceTransport(abc.ABC): + """Abstract transport class for AdvisoryNotificationsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "advisorynotifications.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ """
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ # Don't apply the audience if a credentials file was passed by the user.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(
+ api_audience if api_audience else host
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_notifications: gapic_v1.method.wrap_method(
+ self.list_notifications,
+ default_retry=retries.Retry(
+ initial=1.0,
+ maximum=10.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_notification: gapic_v1.method.wrap_method(
+ self.get_notification,
+ default_retry=retries.Retry(
+ initial=1.0,
+ maximum=10.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def list_notifications( + self, + ) -> Callable[ + [service.ListNotificationsRequest], + Union[ + service.ListNotificationsResponse, + Awaitable[service.ListNotificationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_notification( + self, + ) -> Callable[ + [service.GetNotificationRequest], + Union[service.Notification, Awaitable[service.Notification]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AdvisoryNotificationsServiceTransport",) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc.py new file mode 100644 index 000000000000..223c2d18d54d --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc.py @@ -0,0 +1,293 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.advisorynotifications_v1.types import service + +from .base import DEFAULT_CLIENT_INFO, AdvisoryNotificationsServiceTransport + + +class AdvisoryNotificationsServiceGrpcTransport(AdvisoryNotificationsServiceTransport): + """gRPC backend transport for AdvisoryNotificationsService. + + Service to manage Security and Privacy Notifications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "advisorynotifications.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
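+ # Contract sketch for the deprecated callback (illustrative only;
+ # the variable names below are hypothetical):
+ # def client_cert_source() -> Tuple[bytes, bytes]:
+ # return cert_pem_bytes, key_pem_bytes # PEM-encoded cert and key
+ # The returned bytes are wrapped into grpc.ssl_channel_credentials below.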
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "advisorynotifications.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service."""
+ return self._grpc_channel
+
+ @property
+ def list_notifications(
+ self,
+ ) -> Callable[
+ [service.ListNotificationsRequest], service.ListNotificationsResponse
+ ]:
+ r"""Return a callable for the list notifications method over gRPC.
+
+ Lists notifications under a given parent.
+ + Returns: + Callable[[~.ListNotificationsRequest], + ~.ListNotificationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_notifications" not in self._stubs: + self._stubs["list_notifications"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/ListNotifications", + request_serializer=service.ListNotificationsRequest.serialize, + response_deserializer=service.ListNotificationsResponse.deserialize, + ) + return self._stubs["list_notifications"] + + @property + def get_notification( + self, + ) -> Callable[[service.GetNotificationRequest], service.Notification]: + r"""Return a callable for the get notification method over gRPC. + + Gets a notification. + + Returns: + Callable[[~.GetNotificationRequest], + ~.Notification]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_notification" not in self._stubs: + self._stubs["get_notification"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/GetNotification", + request_serializer=service.GetNotificationRequest.serialize, + response_deserializer=service.Notification.deserialize, + ) + return self._stubs["get_notification"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AdvisoryNotificationsServiceGrpcTransport",) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc_asyncio.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..98f01e8a0ccf --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc_asyncio.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.advisorynotifications_v1.types import service
+
+from .base import DEFAULT_CLIENT_INFO, AdvisoryNotificationsServiceTransport
+from .grpc import AdvisoryNotificationsServiceGrpcTransport
+
+
+class AdvisoryNotificationsServiceGrpcAsyncIOTransport(
+ AdvisoryNotificationsServiceTransport
+):
+ """gRPC AsyncIO backend transport for AdvisoryNotificationsService.
+
+ Service to manage Security and Privacy Notifications.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "advisorynotifications.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "advisorynotifications.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[aio.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_notifications( + self, + ) -> Callable[ + [service.ListNotificationsRequest], Awaitable[service.ListNotificationsResponse] + ]: + r"""Return a callable for the list notifications method over gRPC. + + Lists notifications under a given parent. + + Returns: + Callable[[~.ListNotificationsRequest], + Awaitable[~.ListNotificationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_notifications" not in self._stubs: + self._stubs["list_notifications"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/ListNotifications", + request_serializer=service.ListNotificationsRequest.serialize, + response_deserializer=service.ListNotificationsResponse.deserialize, + ) + return self._stubs["list_notifications"] + + @property + def get_notification( + self, + ) -> Callable[[service.GetNotificationRequest], Awaitable[service.Notification]]: + r"""Return a callable for the get notification method over gRPC. + + Gets a notification. + + Returns: + Callable[[~.GetNotificationRequest], + Awaitable[~.Notification]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
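+ # The resulting callable is cached in self._stubs, so repeated
+ # property accesses reuse a single unary-unary stub on the shared
+ # channel rather than recreating it per call.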
+ if "get_notification" not in self._stubs: + self._stubs["get_notification"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/GetNotification", + request_serializer=service.GetNotificationRequest.serialize, + response_deserializer=service.Notification.deserialize, + ) + return self._stubs["get_notification"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("AdvisoryNotificationsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py new file mode 100644 index 000000000000..ad043e6edae3 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py @@ -0,0 +1,439 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.advisorynotifications_v1.types import service + +from .base import AdvisoryNotificationsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AdvisoryNotificationsServiceRestInterceptor: + """Interceptor for AdvisoryNotificationsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AdvisoryNotificationsServiceRestTransport. + + .. 
code-block:: python
+        class MyCustomAdvisoryNotificationsServiceInterceptor(AdvisoryNotificationsServiceRestInterceptor):
+            def pre_get_notification(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_notification(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_notifications(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_notifications(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = AdvisoryNotificationsServiceRestTransport(interceptor=MyCustomAdvisoryNotificationsServiceInterceptor())
+        client = AdvisoryNotificationsServiceClient(transport=transport)
+
+
+    """
+
+    def pre_get_notification(
+        self,
+        request: service.GetNotificationRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[service.GetNotificationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_notification
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the AdvisoryNotificationsService server.
+        """
+        return request, metadata
+
+    def post_get_notification(
+        self, response: service.Notification
+    ) -> service.Notification:
+        """Post-rpc interceptor for get_notification
+
+        Override in a subclass to manipulate the response
+        after it is returned by the AdvisoryNotificationsService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_notifications(
+        self,
+        request: service.ListNotificationsRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[service.ListNotificationsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_notifications
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the AdvisoryNotificationsService server.
+        """
+        return request, metadata
+
+    def post_list_notifications(
+        self, response: service.ListNotificationsResponse
+    ) -> service.ListNotificationsResponse:
+        """Post-rpc interceptor for list_notifications
+
+        Override in a subclass to manipulate the response
+        after it is returned by the AdvisoryNotificationsService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class AdvisoryNotificationsServiceRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: AdvisoryNotificationsServiceRestInterceptor
+
+
+class AdvisoryNotificationsServiceRestTransport(AdvisoryNotificationsServiceTransport):
+    """REST backend transport for AdvisoryNotificationsService.
+
+    Service to manage Security and Privacy Notifications.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "advisorynotifications.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[AdvisoryNotificationsServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
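+        # The regex below splits an optional scheme off of ``host`` so that a
+        # bare hostname gets ``url_scheme`` ("https" unless overridden)
+        # prepended before the base constructor runs.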
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AdvisoryNotificationsServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _GetNotification(AdvisoryNotificationsServiceRestStub):
+        def __hash__(self):
+            return hash("GetNotification")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.GetNotificationRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> service.Notification:
+            r"""Call the get notification method over HTTP.
+
+            Args:
+                request (~.service.GetNotificationRequest):
+                    The request object. Request for fetching a notification.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.service.Notification:
+                    A notification object for notifying
+                customers about security and privacy
+                issues.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=organizations/*/locations/*/notifications/*}",
+                },
+            ]
+            request, metadata = self._interceptor.pre_get_notification(
+                request, metadata
+            )
+            pb_request = service.GetNotificationRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
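+            # ``from_http_response`` reads the status code and JSON error body
+            # and raises the matching subclass, e.g. ``NotFound`` for 404 or
+            # ``PermissionDenied`` for 403.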
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.Notification() + pb_resp = service.Notification.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_notification(resp) + return resp + + class _ListNotifications(AdvisoryNotificationsServiceRestStub): + def __hash__(self): + return hash("ListNotifications") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListNotificationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListNotificationsResponse: + r"""Call the list notifications method over HTTP. + + Args: + request (~.service.ListNotificationsRequest): + The request object. Request for fetching all + notifications for a given parent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListNotificationsResponse: + Response of ListNotifications + endpoint. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/notifications", + }, + ] + request, metadata = self._interceptor.pre_list_notifications( + request, metadata + ) + pb_request = service.ListNotificationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListNotificationsResponse() + pb_resp = service.ListNotificationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_notifications(resp) + return resp + + @property + def get_notification( + self, + ) -> Callable[[service.GetNotificationRequest], service.Notification]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetNotification(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_notifications( + self, + ) -> Callable[ + [service.ListNotificationsRequest], service.ListNotificationsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNotifications(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AdvisoryNotificationsServiceRestTransport",) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/__init__.py new file mode 100644 index 000000000000..2cb4295e5d8c --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .service import ( + Attachment, + Csv, + GetNotificationRequest, + ListNotificationsRequest, + ListNotificationsResponse, + LocalizationState, + Message, + Notification, + NotificationType, + NotificationView, + Subject, + Text, +) + +__all__ = ( + "Attachment", + "Csv", + "GetNotificationRequest", + "ListNotificationsRequest", + "ListNotificationsResponse", + "Message", + "Notification", + "Subject", + "Text", + "LocalizationState", + "NotificationType", + "NotificationView", +) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py new file mode 100644 index 000000000000..251a7d5614ce --- /dev/null +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py @@ -0,0 +1,429 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.advisorynotifications.v1", + manifest={ + "NotificationView", + "LocalizationState", + "NotificationType", + "Notification", + "Text", + "Subject", + "Message", + "Attachment", + "Csv", + "ListNotificationsRequest", + "ListNotificationsResponse", + "GetNotificationRequest", + }, +) + + +class NotificationView(proto.Enum): + r"""Notification view. + + Values: + NOTIFICATION_VIEW_UNSPECIFIED (0): + Not specified, equivalent to BASIC. + BASIC (1): + Server responses only include title, creation + time and Notification ID. Note: for internal use + responses also include the last update time, the + latest message text and whether notification has + attachments. + FULL (2): + Include everything. + """ + NOTIFICATION_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class LocalizationState(proto.Enum): + r"""Status of localized text. + + Values: + LOCALIZATION_STATE_UNSPECIFIED (0): + Not used. + LOCALIZATION_STATE_NOT_APPLICABLE (1): + Localization is not applicable for requested + language. This can happen when: + - The requested language was not supported by + Advisory Notifications at the time of + localization (including notifications created + before the localization feature was launched). + - The requested language is English, so only the + English text is returned. + LOCALIZATION_STATE_PENDING (2): + Localization for requested language is in + progress, and not ready yet. + LOCALIZATION_STATE_COMPLETED (3): + Localization for requested language is + completed. + """ + LOCALIZATION_STATE_UNSPECIFIED = 0 + LOCALIZATION_STATE_NOT_APPLICABLE = 1 + LOCALIZATION_STATE_PENDING = 2 + LOCALIZATION_STATE_COMPLETED = 3 + + +class NotificationType(proto.Enum): + r"""Type of notification + + Values: + NOTIFICATION_TYPE_UNSPECIFIED (0): + Default type + NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY (1): + Security and privacy advisory notifications + NOTIFICATION_TYPE_SENSITIVE_ACTIONS (2): + Sensitive action notifications + """ + NOTIFICATION_TYPE_UNSPECIFIED = 0 + NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY = 1 + NOTIFICATION_TYPE_SENSITIVE_ACTIONS = 2 + + +class Notification(proto.Message): + r"""A notification object for notifying customers about security + and privacy issues. + + Attributes: + name (str): + The resource name of the notification. + Format: + organizations/{organization}/locations/{location}/notifications/{notification}. + subject (google.cloud.advisorynotifications_v1.types.Subject): + The subject line of the notification. + messages (MutableSequence[google.cloud.advisorynotifications_v1.types.Message]): + A list of messages in the notification. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the notification was + created. 
+ notification_type (google.cloud.advisorynotifications_v1.types.NotificationType): + Type of notification + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + subject: "Subject" = proto.Field( + proto.MESSAGE, + number=2, + message="Subject", + ) + messages: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Message", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + notification_type: "NotificationType" = proto.Field( + proto.ENUM, + number=12, + enum="NotificationType", + ) + + +class Text(proto.Message): + r"""A text object containing the English text and its localized + copies. + + Attributes: + en_text (str): + The English copy. + localized_text (str): + The requested localized copy (if applicable). + localization_state (google.cloud.advisorynotifications_v1.types.LocalizationState): + Status of the localization. + """ + + en_text: str = proto.Field( + proto.STRING, + number=1, + ) + localized_text: str = proto.Field( + proto.STRING, + number=2, + ) + localization_state: "LocalizationState" = proto.Field( + proto.ENUM, + number=3, + enum="LocalizationState", + ) + + +class Subject(proto.Message): + r"""A subject line of a notification. + + Attributes: + text (google.cloud.advisorynotifications_v1.types.Text): + The text content. + """ + + text: "Text" = proto.Field( + proto.MESSAGE, + number=1, + message="Text", + ) + + +class Message(proto.Message): + r"""A message which contains notification details. + + Attributes: + body (google.cloud.advisorynotifications_v1.types.Message.Body): + The message content. + attachments (MutableSequence[google.cloud.advisorynotifications_v1.types.Attachment]): + The attachments to download. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The Message creation timestamp. + localization_time (google.protobuf.timestamp_pb2.Timestamp): + Time when Message was localized + """ + + class Body(proto.Message): + r"""A message body containing text. + + Attributes: + text (google.cloud.advisorynotifications_v1.types.Text): + The text content of the message body. + """ + + text: "Text" = proto.Field( + proto.MESSAGE, + number=1, + message="Text", + ) + + body: Body = proto.Field( + proto.MESSAGE, + number=1, + message=Body, + ) + attachments: MutableSequence["Attachment"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Attachment", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + localization_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class Attachment(proto.Message): + r"""Attachment with specific information about the issue. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + csv (google.cloud.advisorynotifications_v1.types.Csv): + A CSV file attachment. Max size is 10 MB. + + This field is a member of `oneof`_ ``data``. + display_name (str): + The title of the attachment. + """ + + csv: "Csv" = proto.Field( + proto.MESSAGE, + number=2, + oneof="data", + message="Csv", + ) + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Csv(proto.Message): + r"""A representation of a CSV file attachment, as a list of + column headers and a list of data rows. + + Attributes: + headers (MutableSequence[str]): + The list of headers for data columns in a CSV + file. 
+            data_rows (MutableSequence[google.cloud.advisorynotifications_v1.types.Csv.CsvRow]):
+                The list of data rows in a CSV file, as
+                string arrays rather than as a single
+                comma-separated string.
+    """
+
+    class CsvRow(proto.Message):
+        r"""A representation of a single data row in a CSV file.
+
+        Attributes:
+            entries (MutableSequence[str]):
+                The data entries in a CSV file row, as a
+                string array rather than a single
+                comma-separated string.
+        """
+
+        entries: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=1,
+        )
+
+    headers: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    data_rows: MutableSequence[CsvRow] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message=CsvRow,
+    )
+
+
+class ListNotificationsRequest(proto.Message):
+    r"""Request for fetching all notifications for a given parent.
+
+    Attributes:
+        parent (str):
+            Required. The parent, which owns this
+            collection of notifications. Must be of the form
+            "organizations/{organization}/locations/{location}".
+        page_size (int):
+            The maximum number of notifications to
+            return. The service may return fewer than this
+            value. If unspecified or equal to 0, at most 50
+            notifications will be returned. The maximum
+            value is 50; values above 50 will be coerced to
+            50.
+        page_token (str):
+            A page token returned from a previous
+            request. When paginating, all other parameters
+            provided in the request must match the call that
+            returned the page token.
+        view (google.cloud.advisorynotifications_v1.types.NotificationView):
+            Specifies which parts of the notification
+            resource should be returned in the response.
+        language_code (str):
+            ISO code for requested localization language.
+            If unset, will be interpreted as "en". If the
+            requested language is valid, but not supported
+            for this notification, English will be returned
+            with a "Not applicable" LocalizationState. If
+            the ISO code is invalid (i.e. not a real
+            language), this RPC will throw an error.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    view: "NotificationView" = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum="NotificationView",
+    )
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListNotificationsResponse(proto.Message):
+    r"""Response of ListNotifications endpoint.
+
+    Attributes:
+        notifications (MutableSequence[google.cloud.advisorynotifications_v1.types.Notification]):
+            List of notifications under a given parent.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+        total_size (int):
+            Estimation of a total number of
+            notifications.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    notifications: MutableSequence["Notification"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Notification",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    total_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+
+
+class GetNotificationRequest(proto.Message):
+    r"""Request for fetching a notification.
+
+    Attributes:
+        name (str):
+            Required. A name of the notification to
+            retrieve. Format:
+            organizations/{organization}/locations/{location}/notifications/{notification}.
+        language_code (str):
+            ISO code for requested localization language.
+            If unset, will be interpreted as "en". If the
+            requested language is valid, but not supported
+            for this notification, English will be returned
+            with a "Not applicable" LocalizationState. If
+            the ISO code is invalid (i.e. not a real
+            language), this RPC will throw an error.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-advisorynotifications/mypy.ini b/packages/google-cloud-advisorynotifications/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-cloud-advisorynotifications/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-cloud-advisorynotifications/noxfile.py b/packages/google-cloud-advisorynotifications/noxfile.py
new file mode 100644
index 000000000000..1749edb5dad7
--- /dev/null
+++ b/packages/google-cloud-advisorynotifications/noxfile.py
@@ -0,0 +1,428 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+
+from __future__ import absolute_import
+
+import os
+import pathlib
+import re
+import shutil
+import warnings
+
+import nox
+
+BLACK_VERSION = "black==22.3.0"
+ISORT_VERSION = "isort==5.10.1"
+LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
+
+DEFAULT_PYTHON_VERSION = "3.9"
+
+UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+UNIT_TEST_STANDARD_DEPENDENCIES = [
+    "mock",
+    "asyncmock",
+    "pytest",
+    "pytest-cov",
+    "pytest-asyncio",
+]
+UNIT_TEST_EXTERNAL_DEPENDENCIES = []
+UNIT_TEST_LOCAL_DEPENDENCIES = []
+UNIT_TEST_DEPENDENCIES = []
+UNIT_TEST_EXTRAS = []
+UNIT_TEST_EXTRAS_BY_PYTHON = {}
+
+SYSTEM_TEST_PYTHON_VERSIONS = []
+SYSTEM_TEST_STANDARD_DEPENDENCIES = [
+    "mock",
+    "pytest",
+    "google-cloud-testutils",
+]
+SYSTEM_TEST_EXTERNAL_DEPENDENCIES = []
+SYSTEM_TEST_LOCAL_DEPENDENCIES = []
+SYSTEM_TEST_DEPENDENCIES = []
+SYSTEM_TEST_EXTRAS = []
+SYSTEM_TEST_EXTRAS_BY_PYTHON = {}
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+    "unit",
+    "system",
+    "cover",
+    "lint",
+    "lint_setup_py",
+    "blacken",
+    "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint(session):
+    """Run linters.
+
+    Returns a failure if the linters find linting errors or sufficiently
+    serious code quality issues.
+    """
+    session.install("flake8", BLACK_VERSION)
+    session.run(
+        "black",
+        "--check",
+        *LINT_PATHS,
+    )
+    session.run("flake8", "google", "tests")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def blacken(session):
+    """Run black.
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-advisorynotifications/renovate.json b/packages/google-cloud-advisorynotifications/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_notification_async.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_notification_async.py new file mode 100644 index 000000000000..7859a4c1df83 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_notification_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNotification +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
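+# The sample additionally assumes that credentials are available via
+# Application Default Credentials (for example, after running
+# ``gcloud auth application-default login``).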
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_GetNotification_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +async def sample_get_notification(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.GetNotificationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_notification(request=request) + + # Handle the response + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_GetNotification_async] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_notification_sync.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_notification_sync.py new file mode 100644 index 000000000000..5937aac5ec15 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_notification_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNotification +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_GetNotification_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +def sample_get_notification(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.GetNotificationRequest( + name="name_value", + ) + + # Make the request + response = client.get_notification(request=request) + + # Handle the response + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_GetNotification_sync] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_async.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_async.py new file mode 100644 index 000000000000..f7a73154c615 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNotifications +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_ListNotifications_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +async def sample_list_notifications(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.ListNotificationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_notifications(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_ListNotifications_async] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_sync.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_sync.py new file mode 100644 index 000000000000..50d29bdc9f87 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNotifications +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_ListNotifications_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +def sample_list_notifications(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.ListNotificationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_notifications(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_ListNotifications_sync] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json new file mode 100644 index 000000000000..ec626340f4aa --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json @@ -0,0 +1,337 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.advisorynotifications.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-advisorynotifications", + "version": "0.2.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient", + "shortName": "AdvisoryNotificationsServiceAsyncClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient.get_notification", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.GetNotification", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "GetNotification" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.GetNotificationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.types.Notification", + "shortName": "get_notification" + }, + "description": "Sample for GetNotification", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_get_notification_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_GetNotification_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_get_notification_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient", + "shortName": "AdvisoryNotificationsServiceClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient.get_notification", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.GetNotification", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "GetNotification" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.GetNotificationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.types.Notification", + "shortName": "get_notification" + }, + "description": "Sample for GetNotification", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_get_notification_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_GetNotification_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_get_notification_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient", + "shortName": "AdvisoryNotificationsServiceAsyncClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient.list_notifications", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.ListNotifications", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "ListNotifications" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.ListNotificationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.services.advisory_notifications_service.pagers.ListNotificationsAsyncPager", + "shortName": "list_notifications" + }, + "description": "Sample for ListNotifications", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_ListNotifications_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, 
+ "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient", + "shortName": "AdvisoryNotificationsServiceClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient.list_notifications", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.ListNotifications", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "ListNotifications" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.ListNotificationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.services.advisory_notifications_service.pagers.ListNotificationsPager", + "shortName": "list_notifications" + }, + "description": "Sample for ListNotifications", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_ListNotifications_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_sync.py" + } + ] +} diff --git a/packages/google-cloud-advisorynotifications/scripts/decrypt-secrets.sh b/packages/google-cloud-advisorynotifications/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. 
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+       [[ -f "testing/service-account.json" ]] || \
+       [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+    --project="${PROJECT_ID}" \
+    > testing/test-env.sh
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-service-account" \
+    --project="${PROJECT_ID}" \
+    > testing/service-account.json
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-client-secrets" \
+    --project="${PROJECT_ID}" \
+    > testing/client-secrets.json
diff --git a/packages/google-cloud-advisorynotifications/scripts/fixup_advisorynotifications_v1_keywords.py b/packages/google-cloud-advisorynotifications/scripts/fixup_advisorynotifications_v1_keywords.py
new file mode 100644
index 000000000000..0125dd96d7e6
--- /dev/null
+++ b/packages/google-cloud-advisorynotifications/scripts/fixup_advisorynotifications_v1_keywords.py
@@ -0,0 +1,177 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class advisorynotificationsCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
+        'get_notification': ('name', 'language_code', ),
+        'list_notifications': ('parent', 'page_size', 'page_token', 'view', 'language_code', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
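+            # Idempotency guard: a call that already passes request= was
+            # rewritten by an earlier run of this script, so leave it untouched.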
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=advisorynotificationsCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the advisorynotifications client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
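+
+Example invocation (the directory names below are illustrative only):
+
+      python3 fixup_advisorynotifications_v1_keywords.py \
+          --input-directory samples/old --output-directory samples/new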
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-advisorynotifications/setup.py b/packages/google-cloud-advisorynotifications/setup.py new file mode 100644 index 000000000000..e35d9e114e22 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-advisorynotifications" + + +description = "Google Cloud Advisorynotifications API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/advisorynotifications/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-advisorynotifications/testing/.gitignore b/packages/google-cloud-advisorynotifications/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-advisorynotifications/testing/constraints-3.10.txt b/packages/google-cloud-advisorynotifications/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-advisorynotifications/testing/constraints-3.11.txt b/packages/google-cloud-advisorynotifications/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-advisorynotifications/testing/constraints-3.12.txt b/packages/google-cloud-advisorynotifications/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-advisorynotifications/testing/constraints-3.7.txt b/packages/google-cloud-advisorynotifications/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-advisorynotifications/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-advisorynotifications/testing/constraints-3.8.txt b/packages/google-cloud-advisorynotifications/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-advisorynotifications/testing/constraints-3.9.txt b/packages/google-cloud-advisorynotifications/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-advisorynotifications/tests/__init__.py b/packages/google-cloud-advisorynotifications/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-advisorynotifications/tests/unit/__init__.py b/packages/google-cloud-advisorynotifications/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/__init__.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/__init__.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py new file mode 100644 index 000000000000..fc61e01d7ca7 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py @@ -0,0 +1,2856 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.advisorynotifications_v1.services.advisory_notifications_service import ( + AdvisoryNotificationsServiceAsyncClient, + AdvisoryNotificationsServiceClient, + pagers, + transports, +) +from google.cloud.advisorynotifications_v1.types import service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
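+# (A localhost default would be rewritten to "foo.googleapis.com", from which a
+# distinct mTLS endpoint, "foo.mtls.googleapis.com", can then be derived.)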
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AdvisoryNotificationsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AdvisoryNotificationsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AdvisoryNotificationsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AdvisoryNotificationsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AdvisoryNotificationsServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + AdvisoryNotificationsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AdvisoryNotificationsServiceClient, "grpc"), + (AdvisoryNotificationsServiceAsyncClient, "grpc_asyncio"), + (AdvisoryNotificationsServiceClient, "rest"), + ], +) +def test_advisory_notifications_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "advisorynotifications.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://advisorynotifications.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AdvisoryNotificationsServiceGrpcTransport, "grpc"), + (transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AdvisoryNotificationsServiceRestTransport, "rest"), + ], +) +def test_advisory_notifications_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AdvisoryNotificationsServiceClient, "grpc"), + (AdvisoryNotificationsServiceAsyncClient, "grpc_asyncio"), + (AdvisoryNotificationsServiceClient, "rest"), + ], +) +def test_advisory_notifications_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + 
client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "advisorynotifications.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://advisorynotifications.googleapis.com" + ) + + +def test_advisory_notifications_service_client_get_transport_class(): + transport = AdvisoryNotificationsServiceClient.get_transport_class() + available_transports = [ + transports.AdvisoryNotificationsServiceGrpcTransport, + transports.AdvisoryNotificationsServiceRestTransport, + ] + assert transport in available_transports + + transport = AdvisoryNotificationsServiceClient.get_transport_class("grpc") + assert transport == transports.AdvisoryNotificationsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceGrpcTransport, + "grpc", + ), + ( + AdvisoryNotificationsServiceAsyncClient, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + AdvisoryNotificationsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AdvisoryNotificationsServiceClient), +) +@mock.patch.object( + AdvisoryNotificationsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AdvisoryNotificationsServiceAsyncClient), +) +def test_advisory_notifications_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + AdvisoryNotificationsServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + AdvisoryNotificationsServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
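+    # ("never" forces the regular, non-mTLS endpoint.)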
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            AdvisoryNotificationsServiceClient,
+            transports.AdvisoryNotificationsServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            AdvisoryNotificationsServiceAsyncClient,
+            transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            AdvisoryNotificationsServiceClient,
+            transports.AdvisoryNotificationsServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            AdvisoryNotificationsServiceAsyncClient,
+            transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+ AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceRestTransport, + "rest", + "true", + ), + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AdvisoryNotificationsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AdvisoryNotificationsServiceClient), +) +@mock.patch.object( + AdvisoryNotificationsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AdvisoryNotificationsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_advisory_notifications_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
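+    # With no cert source available anywhere, the plain endpoint and no mTLS
+    # material should be used, whatever GOOGLE_API_USE_CLIENT_CERTIFICATE says.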
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [AdvisoryNotificationsServiceClient, AdvisoryNotificationsServiceAsyncClient], +) +@mock.patch.object( + AdvisoryNotificationsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AdvisoryNotificationsServiceClient), +) +@mock.patch.object( + AdvisoryNotificationsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AdvisoryNotificationsServiceAsyncClient), +) +def test_advisory_notifications_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
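+    # "auto" plus a discoverable default cert should flip both the endpoint (to
+    # the mTLS variant) and the returned cert source.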
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceGrpcTransport, + "grpc", + ), + ( + AdvisoryNotificationsServiceAsyncClient, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceRestTransport, + "rest", + ), + ], +) +def test_advisory_notifications_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AdvisoryNotificationsServiceAsyncClient, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_advisory_notifications_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
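+    # "credentials.json" is a placeholder path: transport __init__ is mocked
+    # below, so no file is read; only the plumbed-through value is asserted.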
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_advisory_notifications_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.advisorynotifications_v1.services.advisory_notifications_service.transports.AdvisoryNotificationsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AdvisoryNotificationsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AdvisoryNotificationsServiceAsyncClient, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_advisory_notifications_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
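+    # google.auth.load_credentials_from_file is patched to return file_creds, so
+    # create_channel must receive file_creds rather than the ADC credentials.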
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "advisorynotifications.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="advisorynotifications.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListNotificationsRequest, + dict, + ], +) +def test_list_notifications(request_type, transport: str = "grpc"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notifications), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListNotificationsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.list_notifications(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListNotificationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNotificationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_notifications_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notifications), "__call__" + ) as call: + client.list_notifications() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListNotificationsRequest() + + +@pytest.mark.asyncio +async def test_list_notifications_async( + transport: str = "grpc_asyncio", request_type=service.ListNotificationsRequest +): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notifications), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
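+        # FakeUnaryUnaryCall wraps the response so that awaiting the mocked RPC
+        # yields it, mimicking a real grpc.aio unary-unary call.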
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListNotificationsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.list_notifications(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListNotificationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNotificationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_notifications_async_from_dict(): + await test_list_notifications_async(request_type=dict) + + +def test_list_notifications_field_headers(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListNotificationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notifications), "__call__" + ) as call: + call.return_value = service.ListNotificationsResponse() + client.list_notifications(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_notifications_field_headers_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListNotificationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notifications), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListNotificationsResponse() + ) + await client.list_notifications(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_notifications_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_notifications), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListNotificationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_notifications( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].parent
+    mock_val = "parent_value"
+    assert arg == mock_val
+
+
+def test_list_notifications_flattened_error():
+    client = AdvisoryNotificationsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_notifications(
+            service.ListNotificationsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_notifications_flattened_async():
+    client = AdvisoryNotificationsServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_notifications), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListNotificationsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListNotificationsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_notifications(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_notifications_flattened_error_async():
+    client = AdvisoryNotificationsServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_notifications(
+            service.ListNotificationsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_notifications_pager(transport_name: str = "grpc"):
+    client = AdvisoryNotificationsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_notifications), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                    service.Notification(),
+                    service.Notification(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[],
+                next_page_token="def",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                    service.Notification(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_notifications(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, service.Notification) for i in results)
+
+
+def test_list_notifications_pages(transport_name: str = "grpc"):
+    client = AdvisoryNotificationsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_notifications), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                    service.Notification(),
+                    service.Notification(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[],
+                next_page_token="def",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                    service.Notification(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_notifications(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_notifications_async_pager():
+    client = AdvisoryNotificationsServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_notifications),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                    service.Notification(),
+                    service.Notification(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[],
+                next_page_token="def",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListNotificationsResponse(
+                notifications=[
+                    service.Notification(),
+                    service.Notification(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_notifications(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, service.Notification) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_notifications_async_pages():
+    client = AdvisoryNotificationsServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_notifications),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
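+        # Each side_effect entry answers one underlying RPC in order; the trailing
+        # RuntimeError would only surface if a page past the last were requested.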
+ call.side_effect = ( + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + service.Notification(), + service.Notification(), + ], + next_page_token="abc", + ), + service.ListNotificationsResponse( + notifications=[], + next_page_token="def", + ), + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + ], + next_page_token="ghi", + ), + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + service.Notification(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_notifications(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetNotificationRequest, + dict, + ], +) +def test_get_notification(request_type, transport: str = "grpc"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_notification), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Notification( + name="name_value", + notification_type=service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY, + ) + response = client.get_notification(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetNotificationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Notification) + assert response.name == "name_value" + assert ( + response.notification_type + == service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY + ) + + +def test_get_notification_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_notification), "__call__") as call: + client.get_notification() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetNotificationRequest() + + +@pytest.mark.asyncio +async def test_get_notification_async( + transport: str = "grpc_asyncio", request_type=service.GetNotificationRequest +): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_notification), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.Notification( + name="name_value", + notification_type=service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY, + ) + ) + response = await client.get_notification(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetNotificationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Notification) + assert response.name == "name_value" + assert ( + response.notification_type + == service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY + ) + + +@pytest.mark.asyncio +async def test_get_notification_async_from_dict(): + await test_get_notification_async(request_type=dict) + + +def test_get_notification_field_headers(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetNotificationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_notification), "__call__") as call: + call.return_value = service.Notification() + client.get_notification(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_notification_field_headers_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetNotificationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_notification), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.Notification() + ) + await client.get_notification(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_notification_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_notification), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Notification() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_notification( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].name
+    mock_val = "name_value"
+    assert arg == mock_val
+
+
+def test_get_notification_flattened_error():
+    client = AdvisoryNotificationsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_notification(
+            service.GetNotificationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_notification_flattened_async():
+    client = AdvisoryNotificationsServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_notification), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.Notification()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_notification(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_notification_flattened_error_async():
+    client = AdvisoryNotificationsServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_notification(
+            service.GetNotificationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.ListNotificationsRequest,
+        dict,
+    ],
+)
+def test_list_notifications_rest(request_type):
+    client = AdvisoryNotificationsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "organizations/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListNotificationsResponse(
+            next_page_token="next_page_token_value",
+            total_size=1086,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = service.ListNotificationsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_notifications(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListNotificationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_notifications_rest_required_fields( + request_type=service.ListNotificationsRequest, +): + transport_class = transports.AdvisoryNotificationsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_notifications._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_notifications._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "language_code", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListNotificationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.ListNotificationsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_notifications(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_notifications_rest_unset_required_fields():
+    transport = transports.AdvisoryNotificationsServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_notifications._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "languageCode",
+                "pageSize",
+                "pageToken",
+                "view",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_notifications_rest_interceptors(null_interceptor):
+    transport = transports.AdvisoryNotificationsServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AdvisoryNotificationsServiceRestInterceptor(),
+    )
+    client = AdvisoryNotificationsServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AdvisoryNotificationsServiceRestInterceptor,
+        "post_list_notifications",
+    ) as post, mock.patch.object(
+        transports.AdvisoryNotificationsServiceRestInterceptor, "pre_list_notifications"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.ListNotificationsRequest.pb(
+            service.ListNotificationsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.ListNotificationsResponse.to_json(
+            service.ListNotificationsResponse()
+        )
+
+        request = service.ListNotificationsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListNotificationsResponse()
+
+        client.list_notifications(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_notifications_rest_bad_request(
+    transport: str = "rest", request_type=service.ListNotificationsRequest
+):
+    client = AdvisoryNotificationsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "organizations/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
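+    # A 400 status from the mocked session should surface to the caller as
+    # core_exceptions.BadRequest.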
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_notifications(request) + + +def test_list_notifications_rest_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListNotificationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListNotificationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_notifications(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/notifications" + % client.transport._host, + args[1], + ) + + +def test_list_notifications_rest_flattened_error(transport: str = "rest"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_notifications( + service.ListNotificationsRequest(), + parent="parent_value", + ) + + +def test_list_notifications_rest_pager(transport: str = "rest"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + service.Notification(), + service.Notification(), + ], + next_page_token="abc", + ), + service.ListNotificationsResponse( + notifications=[], + next_page_token="def", + ), + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + ], + next_page_token="ghi", + ), + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + service.Notification(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListNotificationsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_notifications(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service.Notification) for i in results) + + pages = list(client.list_notifications(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetNotificationRequest, + dict, + ], +) +def test_get_notification_rest(request_type): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/notifications/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.Notification( + name="name_value", + notification_type=service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.Notification.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_notification(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.Notification) + assert response.name == "name_value" + assert ( + response.notification_type + == service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY + ) + + +def test_get_notification_rest_required_fields( + request_type=service.GetNotificationRequest, +): + transport_class = transports.AdvisoryNotificationsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_notification._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_notification._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("language_code",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.Notification() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.Notification.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_notification(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_notification_rest_unset_required_fields():
+    transport = transports.AdvisoryNotificationsServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_notification._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("languageCode",)) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_notification_rest_interceptors(null_interceptor):
+    transport = transports.AdvisoryNotificationsServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AdvisoryNotificationsServiceRestInterceptor(),
+    )
+    client = AdvisoryNotificationsServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AdvisoryNotificationsServiceRestInterceptor, "post_get_notification"
+    ) as post, mock.patch.object(
+        transports.AdvisoryNotificationsServiceRestInterceptor, "pre_get_notification"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.GetNotificationRequest.pb(service.GetNotificationRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.Notification.to_json(service.Notification())
+
+        request = service.GetNotificationRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.Notification()
+
+        client.get_notification(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_notification_rest_bad_request(
+    transport: str = "rest", request_type=service.GetNotificationRequest
+):
+    client = AdvisoryNotificationsServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "organizations/sample1/locations/sample2/notifications/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_notification(request) + + +def test_get_notification_rest_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.Notification() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/notifications/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.Notification.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_notification(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/notifications/*}" + % client.transport._host, + args[1], + ) + + +def test_get_notification_rest_flattened_error(transport: str = "rest"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_notification( + service.GetNotificationRequest(), + name="name_value", + ) + + +def test_get_notification_rest_error(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AdvisoryNotificationsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AdvisoryNotificationsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AdvisoryNotificationsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AdvisoryNotificationsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AdvisoryNotificationsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AdvisoryNotificationsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AdvisoryNotificationsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AdvisoryNotificationsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AdvisoryNotificationsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AdvisoryNotificationsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AdvisoryNotificationsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AdvisoryNotificationsServiceGrpcTransport, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + transports.AdvisoryNotificationsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AdvisoryNotificationsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AdvisoryNotificationsServiceGrpcTransport, + ) + + +def test_advisory_notifications_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AdvisoryNotificationsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_advisory_notifications_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.advisorynotifications_v1.services.advisory_notifications_service.transports.AdvisoryNotificationsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AdvisoryNotificationsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_notifications", + "get_notification", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_advisory_notifications_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.advisorynotifications_v1.services.advisory_notifications_service.transports.AdvisoryNotificationsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AdvisoryNotificationsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_advisory_notifications_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.advisorynotifications_v1.services.advisory_notifications_service.transports.AdvisoryNotificationsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AdvisoryNotificationsServiceTransport() + adc.assert_called_once() + + +def test_advisory_notifications_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AdvisoryNotificationsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AdvisoryNotificationsServiceGrpcTransport, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + ], +) +def test_advisory_notifications_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AdvisoryNotificationsServiceGrpcTransport, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + transports.AdvisoryNotificationsServiceRestTransport, + ], +) +def test_advisory_notifications_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AdvisoryNotificationsServiceGrpcTransport, grpc_helpers), + ( + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_advisory_notifications_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "advisorynotifications.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="advisorynotifications.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AdvisoryNotificationsServiceGrpcTransport, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + ], +) +def test_advisory_notifications_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_advisory_notifications_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AdvisoryNotificationsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_advisory_notifications_service_host_no_port(transport_name): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="advisorynotifications.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "advisorynotifications.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://advisorynotifications.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_advisory_notifications_service_host_with_port(transport_name): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="advisorynotifications.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "advisorynotifications.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://advisorynotifications.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_advisory_notifications_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AdvisoryNotificationsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AdvisoryNotificationsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_notifications._session + session2 = client2.transport.list_notifications._session + assert session1 != session2 + session1 = client1.transport.get_notification._session + session2 = client2.transport.get_notification._session + assert session1 != session2 + + +def test_advisory_notifications_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.AdvisoryNotificationsServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_advisory_notifications_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.AdvisoryNotificationsServiceGrpcTransport,
+        transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_advisory_notifications_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AdvisoryNotificationsServiceGrpcTransport, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + ], +) +def test_advisory_notifications_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_notification_path(): + organization = "squid" + location = "clam" + notification = "whelk" + expected = "organizations/{organization}/locations/{location}/notifications/{notification}".format( + organization=organization, + location=location, + notification=notification, + ) + actual = AdvisoryNotificationsServiceClient.notification_path( + organization, location, notification + ) + assert expected == actual + + +def test_parse_notification_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "notification": "nudibranch", + } + path = AdvisoryNotificationsServiceClient.notification_path(**expected) + + # Check that the path construction is reversible. + actual = AdvisoryNotificationsServiceClient.parse_notification_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AdvisoryNotificationsServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = AdvisoryNotificationsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AdvisoryNotificationsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AdvisoryNotificationsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = AdvisoryNotificationsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AdvisoryNotificationsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AdvisoryNotificationsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = AdvisoryNotificationsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AdvisoryNotificationsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = AdvisoryNotificationsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = AdvisoryNotificationsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AdvisoryNotificationsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AdvisoryNotificationsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = AdvisoryNotificationsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AdvisoryNotificationsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AdvisoryNotificationsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AdvisoryNotificationsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AdvisoryNotificationsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + AdvisoryNotificationsServiceClient, + transports.AdvisoryNotificationsServiceGrpcTransport, + ), + ( + AdvisoryNotificationsServiceAsyncClient, + transports.AdvisoryNotificationsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-alloydb/.OwlBot.yaml b/packages/google-cloud-alloydb/.OwlBot.yaml new file mode 100644 index 000000000000..0c0c9697fa48 --- /dev/null +++ b/packages/google-cloud-alloydb/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/alloydb/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-alloydb/$1 diff --git a/packages/google-cloud-alloydb/.coveragerc b/packages/google-cloud-alloydb/.coveragerc new file mode 100644 index 000000000000..307a1b7e14a0 --- /dev/null +++ b/packages/google-cloud-alloydb/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/alloydb/__init__.py + google/cloud/alloydb/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-alloydb/.flake8 b/packages/google-cloud-alloydb/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-alloydb/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-alloydb/.gitignore b/packages/google-cloud-alloydb/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-alloydb/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-alloydb/.repo-metadata.json b/packages/google-cloud-alloydb/.repo-metadata.json new file mode 100644 index 000000000000..8365d2c8aa4d --- /dev/null +++ b/packages/google-cloud-alloydb/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "alloydb", + "name_pretty": "AlloyDB", + "api_description": "", + "product_documentation": "https://cloud.google.com/alloydb/", + "client_documentation": "https://cloud.google.com/python/docs/reference/alloydb/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-alloydb", + "api_id": "alloydb.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "alloydb" +} diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md new file mode 100644 index 000000000000..31a95202a7f3 --- /dev/null +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.1.0...google-cloud-alloydb-v0.1.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## 0.1.0 (2023-03-06) + + +### Features + +* add initial files for google.cloud.alloydb.v1 ([#10847](https://github.com/googleapis/google-cloud-python/issues/10847)) ([c9ebf82](https://github.com/googleapis/google-cloud-python/commit/c9ebf8298d0164d382c278a1a8c95cccc3dd7491)) + +## Changelog diff --git a/packages/google-cloud-alloydb/CODE_OF_CONDUCT.md b/packages/google-cloud-alloydb/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-alloydb/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and 
expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-alloydb/CONTRIBUTING.rst b/packages/google-cloud-alloydb/CONTRIBUTING.rst new file mode 100644 index 000000000000..378d7b535ab2 --- /dev/null +++ b/packages/google-cloud-alloydb/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+ +- To test your changes, run unit tests with ``nox``:: + +    $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. _nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.11 -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature.
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-alloydb + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py + + +We also explicitly decided to support Python 3 beginning with version 3.7. +Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests.
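For readers unfamiliar with the ``_call_fut`` convention noted under "Exceptions to PEP8" above, the following is a minimal, self-contained sketch of the pattern; the function under test here is a toy stand-in, not part of this package:

.. code-block:: python

    import unittest


    def parse_project_id(name: str) -> str:
        """Toy function under test: extract the id from 'projects/<id>'."""
        return name.split("/", 1)[1]


    class TestParseProjectId(unittest.TestCase):
        @staticmethod
        def _call_fut(*args, **kwargs):
            # "FUT" = Function-Under-Test; every test reaches the target
            # only through this helper, so what is being tested stays
            # explicit even though the name is PEP8-incompliant.
            return parse_project_id(*args, **kwargs)

        def test_extracts_id(self):
            self.assertEqual(self._call_fut("projects/my-proj"), "my-proj")


    if __name__ == "__main__":
        unittest.main()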
diff --git a/packages/google-cloud-alloydb/LICENSE b/packages/google-cloud-alloydb/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-alloydb/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-alloydb/MANIFEST.in b/packages/google-cloud-alloydb/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-alloydb/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-alloydb/README.rst b/packages/google-cloud-alloydb/README.rst new file mode 100644 index 000000000000..690405d2b972 --- /dev/null +++ b/packages/google-cloud-alloydb/README.rst @@ -0,0 +1,107 @@ +Python Client for AlloyDB +========================= + +|preview| |pypi| |versions| + +`AlloyDB`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-alloydb.svg + :target: https://pypi.org/project/google-cloud-alloydb/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-alloydb.svg + :target: https://pypi.org/project/google-cloud-alloydb/ +.. _AlloyDB: https://cloud.google.com/alloydb/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/alloydb/latest +.. _Product Documentation: https://cloud.google.com/alloydb/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the AlloyDB.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the AlloyDB.: https://cloud.google.com/alloydb/ +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. 
+ +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-cloud-alloydb + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-cloud-alloydb + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for AlloyDB + to see other available methods on the client. +- Read the `AlloyDB Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _AlloyDB Product documentation: https://cloud.google.com/alloydb/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-alloydb/SECURITY.md b/packages/google-cloud-alloydb/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-alloydb/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
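Following the README's installation steps above, a first call against the API might look like the sketch below. It assumes Application Default Credentials are already configured; the project and location values are placeholders:

.. code-block:: python

    from google.cloud import alloydb_v1

    # The admin client picks up Application Default Credentials automatically.
    client = alloydb_v1.AlloyDBAdminClient()

    # Resource names follow the "projects/<project>/locations/<location>"
    # pattern; substitute real values for these placeholders.
    parent = "projects/my-project/locations/us-central1"

    # list_clusters returns a pager that lazily fetches further pages.
    for cluster in client.list_clusters(parent=parent):
        print(cluster.name)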
diff --git a/packages/google-cloud-alloydb/docs/CHANGELOG.md b/packages/google-cloud-alloydb/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-alloydb/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-alloydb/docs/README.rst b/packages/google-cloud-alloydb/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-alloydb/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-alloydb/docs/_static/custom.css b/packages/google-cloud-alloydb/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-alloydb/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-alloydb/docs/_templates/layout.html b/packages/google-cloud-alloydb/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-alloydb/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
<div class="document"> + {{ sidebar() }} + {%- block document %} + <div class="documentwrapper"> + {%- if render_sidebar %} + <div class="bodywrapper"> + {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + <div class="related top"> + &nbsp; + {{- rellink_markup () }} + </div> + {%- endif %} + {% endblock %} + + <div class="body" role="main"> + <div class="admonition" id="python2-eol"> + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. + </div> + {% block body %} {% endblock %} + </div> + + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + <div class="related bottom"> + &nbsp; + {{- rellink_markup () }} + </div> + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} + </div> + {%- endif %} + </div> + {%- endblock %} + <div class="clearer"></div> + </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1/alloy_db_admin.rst b/packages/google-cloud-alloydb/docs/alloydb_v1/alloy_db_admin.rst new file mode 100644 index 000000000000..5ef26574dd58 --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1/alloy_db_admin.rst @@ -0,0 +1,10 @@ +AlloyDBAdmin +------------------------------ + +.. automodule:: google.cloud.alloydb_v1.services.alloy_db_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.alloydb_v1.services.alloy_db_admin.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1/services.rst b/packages/google-cloud-alloydb/docs/alloydb_v1/services.rst new file mode 100644 index 000000000000..f8842c56fe2f --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Alloydb v1 API +======================================== +.. toctree:: + :maxdepth: 2 + + alloy_db_admin diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1/types.rst b/packages/google-cloud-alloydb/docs/alloydb_v1/types.rst new file mode 100644 index 000000000000..8491d6038c4c --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Alloydb v1 API +===================================== + +.. automodule:: google.cloud.alloydb_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1alpha/alloy_db_admin.rst b/packages/google-cloud-alloydb/docs/alloydb_v1alpha/alloy_db_admin.rst new file mode 100644 index 000000000000..15e3c4a2f15b --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1alpha/alloy_db_admin.rst @@ -0,0 +1,10 @@ +AlloyDBAdmin +------------------------------ + +.. automodule:: google.cloud.alloydb_v1alpha.services.alloy_db_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1alpha/services.rst b/packages/google-cloud-alloydb/docs/alloydb_v1alpha/services.rst new file mode 100644 index 000000000000..51612828ca6b --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1alpha/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Alloydb v1alpha API +============================================= +.. toctree:: + :maxdepth: 2 + + alloy_db_admin diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1alpha/types.rst b/packages/google-cloud-alloydb/docs/alloydb_v1alpha/types.rst new file mode 100644 index 000000000000..2b74c1903ffc --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1alpha/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Alloydb v1alpha API +========================================== + +.. automodule:: google.cloud.alloydb_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1beta/alloy_db_admin.rst b/packages/google-cloud-alloydb/docs/alloydb_v1beta/alloy_db_admin.rst new file mode 100644 index 000000000000..2998426115e9 --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1beta/alloy_db_admin.rst @@ -0,0 +1,10 @@ +AlloyDBAdmin +------------------------------ + +.. automodule:: google.cloud.alloydb_v1beta.services.alloy_db_admin + :members: + :inherited-members: + +.. 
automodule:: google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1beta/services.rst b/packages/google-cloud-alloydb/docs/alloydb_v1beta/services.rst new file mode 100644 index 000000000000..f0f4f416e44f --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1beta/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Alloydb v1beta API +============================================ +.. toctree:: + :maxdepth: 2 + + alloy_db_admin diff --git a/packages/google-cloud-alloydb/docs/alloydb_v1beta/types.rst b/packages/google-cloud-alloydb/docs/alloydb_v1beta/types.rst new file mode 100644 index 000000000000..979a4ec6e43a --- /dev/null +++ b/packages/google-cloud-alloydb/docs/alloydb_v1beta/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Alloydb v1beta API +========================================= + +.. automodule:: google.cloud.alloydb_v1beta.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-alloydb/docs/conf.py b/packages/google-cloud-alloydb/docs/conf.py new file mode 100644 index 000000000000..8b330b2a3459 --- /dev/null +++ b/packages/google-cloud-alloydb/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-alloydb documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. 
+# You can specify multiple suffixes as a list of strings: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-alloydb" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-alloydb", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar.
+# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-alloydb-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble.
+ #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-alloydb.tex", + "google-cloud-alloydb Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-alloydb", + "google-cloud-alloydb Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-alloydb", + "google-cloud-alloydb Documentation", + author, + "google-cloud-alloydb", + "google-cloud-alloydb Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-alloydb/docs/index.rst b/packages/google-cloud-alloydb/docs/index.rst new file mode 100644 index 000000000000..281bf9de7d3a --- /dev/null +++ b/packages/google-cloud-alloydb/docs/index.rst @@ -0,0 +1,42 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of AlloyDB. +By default, you will get version ``alloydb_v1``. 
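In code, the version aliasing described above works roughly as follows: ``google.cloud.alloydb`` re-exports the ``alloydb_v1`` surface, while the pre-GA surfaces are imported explicitly. A sketch (credentials are still required to make actual calls):

.. code-block:: python

    # Default alias: resolves to the v1 surface.
    from google.cloud import alloydb

    client = alloydb.AlloyDBAdminClient()

    # Pinning a specific pre-GA API version instead:
    from google.cloud import alloydb_v1beta

    beta_client = alloydb_v1beta.AlloyDBAdminClient()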
+ + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + alloydb_v1/services + alloydb_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + alloydb_v1alpha/services + alloydb_v1alpha/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + alloydb_v1beta/services + alloydb_v1beta/types + + +Changelog +--------- + +For a list of all ``google-cloud-alloydb`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-alloydb/docs/multiprocessing.rst b/packages/google-cloud-alloydb/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-alloydb/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb/__init__.py new file mode 100644 index 000000000000..cb19aea5678e --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/__init__.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# +from google.cloud.alloydb import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.alloydb_v1.services.alloy_db_admin.async_client import ( + AlloyDBAdminAsyncClient, +) +from google.cloud.alloydb_v1.services.alloy_db_admin.client import AlloyDBAdminClient +from google.cloud.alloydb_v1.types.resources import ( + AutomatedBackupPolicy, + Backup, + BackupSource, + Cluster, + DatabaseVersion, + EncryptionConfig, + EncryptionInfo, + Instance, + InstanceView, + MigrationSource, + SslConfig, + SupportedDatabaseFlag, + UserPassword, +) +from google.cloud.alloydb_v1.types.service import ( + BatchCreateInstancesMetadata, + BatchCreateInstancesRequest, + BatchCreateInstancesResponse, + BatchCreateInstanceStatus, + CreateBackupRequest, + CreateClusterRequest, + CreateInstanceRequest, + CreateInstanceRequests, + DeleteBackupRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + FailoverInstanceRequest, + GetBackupRequest, + GetClusterRequest, + GetInstanceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListClustersRequest, + ListClustersResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSupportedDatabaseFlagsRequest, + ListSupportedDatabaseFlagsResponse, + OperationMetadata, + RestartInstanceRequest, + RestoreClusterRequest, + UpdateBackupRequest, + UpdateClusterRequest, + UpdateInstanceRequest, +) + +__all__ = ( + "AlloyDBAdminClient", + "AlloyDBAdminAsyncClient", + "AutomatedBackupPolicy", + "Backup", + "BackupSource", + "Cluster", + "EncryptionConfig", + "EncryptionInfo", + "Instance", + "MigrationSource", + "SslConfig", + "SupportedDatabaseFlag", + "UserPassword", + "DatabaseVersion", + "InstanceView", + "BatchCreateInstancesMetadata", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "BatchCreateInstanceStatus", + "CreateBackupRequest", + "CreateClusterRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "DeleteBackupRequest", + "DeleteClusterRequest", + "DeleteInstanceRequest", + "FailoverInstanceRequest", + "GetBackupRequest", + "GetClusterRequest", + "GetInstanceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "OperationMetadata", + "RestartInstanceRequest", + "RestoreClusterRequest", + "UpdateBackupRequest", + "UpdateClusterRequest", + "UpdateInstanceRequest", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py new file mode 100644 index 000000000000..7e609eff7b01 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/py.typed b/packages/google-cloud-alloydb/google/cloud/alloydb/py.typed new file mode 100644 index 000000000000..c71d0f20b482 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-alloydb package uses inline types. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/__init__.py new file mode 100644 index 000000000000..a57bf9634bda --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/__init__.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.alloydb_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.alloy_db_admin import AlloyDBAdminAsyncClient, AlloyDBAdminClient +from .types.resources import ( + AutomatedBackupPolicy, + Backup, + BackupSource, + Cluster, + DatabaseVersion, + EncryptionConfig, + EncryptionInfo, + Instance, + InstanceView, + MigrationSource, + SslConfig, + SupportedDatabaseFlag, + UserPassword, +) +from .types.service import ( + BatchCreateInstancesMetadata, + BatchCreateInstancesRequest, + BatchCreateInstancesResponse, + BatchCreateInstanceStatus, + CreateBackupRequest, + CreateClusterRequest, + CreateInstanceRequest, + CreateInstanceRequests, + DeleteBackupRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + FailoverInstanceRequest, + GetBackupRequest, + GetClusterRequest, + GetInstanceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListClustersRequest, + ListClustersResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSupportedDatabaseFlagsRequest, + ListSupportedDatabaseFlagsResponse, + OperationMetadata, + RestartInstanceRequest, + RestoreClusterRequest, + UpdateBackupRequest, + UpdateClusterRequest, + UpdateInstanceRequest, +) + +__all__ = ( + "AlloyDBAdminAsyncClient", + "AlloyDBAdminClient", + "AutomatedBackupPolicy", + "Backup", + "BackupSource", + "BatchCreateInstanceStatus", + "BatchCreateInstancesMetadata", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "Cluster", + "CreateBackupRequest", + "CreateClusterRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "DatabaseVersion", + "DeleteBackupRequest", + "DeleteClusterRequest", + "DeleteInstanceRequest", + "EncryptionConfig", + "EncryptionInfo", + "FailoverInstanceRequest", + "GetBackupRequest", + "GetClusterRequest", + "GetInstanceRequest", + "Instance", + "InstanceView", + "ListBackupsRequest", + "ListBackupsResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "MigrationSource", + "OperationMetadata", + "RestartInstanceRequest", + "RestoreClusterRequest", + 
"SslConfig", + "SupportedDatabaseFlag", + "UpdateBackupRequest", + "UpdateClusterRequest", + "UpdateInstanceRequest", + "UserPassword", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_metadata.json new file mode 100644 index 000000000000..2a9932d16b80 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_metadata.json @@ -0,0 +1,328 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.alloydb_v1", + "protoPackage": "google.cloud.alloydb.v1", + "schema": "1.0", + "services": { + "AlloyDBAdmin": { + "clients": { + "grpc": { + "libraryClient": "AlloyDBAdminClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AlloyDBAdminAsyncClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "rest": { + 
"libraryClient": "AlloyDBAdminClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py new file mode 100644 index 000000000000..1b461870ca63 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/py.typed b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/py.typed new file mode 100644 index 000000000000..c71d0f20b482 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-alloydb package uses inline types. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/__init__.py new file mode 100644 index 000000000000..1acb07113321 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AlloyDBAdminAsyncClient +from .client import AlloyDBAdminClient + +__all__ = ( + "AlloyDBAdminClient", + "AlloyDBAdminAsyncClient", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py new file mode 100644 index 000000000000..d0baab42bc5b --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py @@ -0,0 +1,3078 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.alloydb_v1.services.alloy_db_admin import pagers +from google.cloud.alloydb_v1.types import resources, service + +from .client import AlloyDBAdminClient +from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport +from .transports.grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport + + +class AlloyDBAdminAsyncClient: + """Service describing handlers for resources""" + + _client: AlloyDBAdminClient + + DEFAULT_ENDPOINT = AlloyDBAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AlloyDBAdminClient.DEFAULT_MTLS_ENDPOINT + + backup_path = staticmethod(AlloyDBAdminClient.backup_path) + parse_backup_path = staticmethod(AlloyDBAdminClient.parse_backup_path) + cluster_path = staticmethod(AlloyDBAdminClient.cluster_path) + parse_cluster_path = staticmethod(AlloyDBAdminClient.parse_cluster_path) + crypto_key_version_path = staticmethod(AlloyDBAdminClient.crypto_key_version_path) + parse_crypto_key_version_path = staticmethod( + AlloyDBAdminClient.parse_crypto_key_version_path + ) + instance_path = staticmethod(AlloyDBAdminClient.instance_path) + parse_instance_path = staticmethod(AlloyDBAdminClient.parse_instance_path) + network_path = staticmethod(AlloyDBAdminClient.network_path) + parse_network_path = staticmethod(AlloyDBAdminClient.parse_network_path) + supported_database_flag_path = staticmethod( + AlloyDBAdminClient.supported_database_flag_path + ) + parse_supported_database_flag_path = staticmethod( + AlloyDBAdminClient.parse_supported_database_flag_path + ) + common_billing_account_path = staticmethod( + AlloyDBAdminClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AlloyDBAdminClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AlloyDBAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(AlloyDBAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(AlloyDBAdminClient.common_organization_path) + parse_common_organization_path = staticmethod( + AlloyDBAdminClient.parse_common_organization_path + ) + common_project_path = staticmethod(AlloyDBAdminClient.common_project_path) + parse_common_project_path = staticmethod( + AlloyDBAdminClient.parse_common_project_path + ) + 
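+    # For example, assuming the standard AlloyDB resource-name format, the helper +    # AlloyDBAdminClient.cluster_path("my-project", "us-central1", "my-cluster") +    # returns "projects/my-project/locations/us-central1/clusters/my-cluster", and +    # parse_cluster_path() turns such a name back into a dict of its components.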
common_location_path = staticmethod(AlloyDBAdminClient.common_location_path) +    parse_common_location_path = staticmethod( +        AlloyDBAdminClient.parse_common_location_path +    ) + +    @classmethod +    def from_service_account_info(cls, info: dict, *args, **kwargs): +        """Creates an instance of this client using the provided credentials +            info. + +        Args: +            info (dict): The service account private key info. +            args: Additional arguments to pass to the constructor. +            kwargs: Additional arguments to pass to the constructor. + +        Returns: +            AlloyDBAdminAsyncClient: The constructed client. +        """ +        return AlloyDBAdminClient.from_service_account_info.__func__(AlloyDBAdminAsyncClient, info, *args, **kwargs)  # type: ignore + +    @classmethod +    def from_service_account_file(cls, filename: str, *args, **kwargs): +        """Creates an instance of this client using the provided credentials +            file. + +        Args: +            filename (str): The path to the service account private key json +                file. +            args: Additional arguments to pass to the constructor. +            kwargs: Additional arguments to pass to the constructor. + +        Returns: +            AlloyDBAdminAsyncClient: The constructed client. +        """ +        return AlloyDBAdminClient.from_service_account_file.__func__(AlloyDBAdminAsyncClient, filename, *args, **kwargs)  # type: ignore + +    from_service_account_json = from_service_account_file + +    @classmethod +    def get_mtls_endpoint_and_cert_source( +        cls, client_options: Optional[ClientOptions] = None +    ): +        """Return the API endpoint and client cert source for mutual TLS. + +        The client cert source is determined in the following order: +        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the +        client cert source is None. +        (2) if `client_options.client_cert_source` is provided, use the provided one; if the +        default client cert source exists, use the default one; otherwise the client cert +        source is None. + +        The API endpoint is determined in the following order: +        (1) if `client_options.api_endpoint` is provided, use the provided one. +        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the +        default mTLS endpoint; if the environment variable is "never", use the default API +        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise +        use the default API endpoint. + +        More details can be found at https://google.aip.dev/auth/4114. + +        Args: +            client_options (google.api_core.client_options.ClientOptions): Custom options for the +                client. Only the `api_endpoint` and `client_cert_source` properties may be used +                in this method. + +        Returns: +            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the +                client cert source to use. + +        Raises: +            google.auth.exceptions.MutualTLSChannelError: If any errors happen. +        """ +        return AlloyDBAdminClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore + +    @property +    def transport(self) -> AlloyDBAdminTransport: +        """Returns the transport used by the client instance. + +        Returns: +            AlloyDBAdminTransport: The transport used by the client instance.
+        """ +        return self._client.transport + +    get_transport_class = functools.partial( +        type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) +    ) + +    def __init__( +        self, +        *, +        credentials: Optional[ga_credentials.Credentials] = None, +        transport: Union[str, AlloyDBAdminTransport] = "grpc_asyncio", +        client_options: Optional[ClientOptions] = None, +        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, +    ) -> None: +        """Instantiates the AlloyDB Admin client. + +        Args: +            credentials (Optional[google.auth.credentials.Credentials]): The +                authorization credentials to attach to requests. These +                credentials identify the application to the service; if none +                are specified, the client will attempt to ascertain the +                credentials from the environment. +            transport (Union[str, ~.AlloyDBAdminTransport]): The +                transport to use. If set to None, a transport is chosen +                automatically. +            client_options (ClientOptions): Custom options for the client. It +                won't take effect if a ``transport`` instance is provided. +                (1) The ``api_endpoint`` property can be used to override the +                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT +                environment variable can also be used to override the endpoint: +                "always" (always use the default mTLS endpoint), "never" (always +                use the default regular endpoint) and "auto" (auto switch to the +                default mTLS endpoint if client certificate is present, this is +                the default value). However, the ``api_endpoint`` property takes +                precedence if provided. +                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable +                is "true", then the ``client_cert_source`` property can be used +                to provide client certificate for mutual TLS transport. If +                not provided, the default SSL client certificate will be used if +                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not +                set, no client certificate will be used. + +        Raises: +            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport +                creation failed for any reason. +        """ +        self._client = AlloyDBAdminClient( +            credentials=credentials, +            transport=transport, +            client_options=client_options, +            client_info=client_info, +        ) + +    async def list_clusters( +        self, +        request: Optional[Union[service.ListClustersRequest, dict]] = None, +        *, +        parent: Optional[str] = None, +        retry: OptionalRetry = gapic_v1.method.DEFAULT, +        timeout: Union[float, object] = gapic_v1.method.DEFAULT, +        metadata: Sequence[Tuple[str, str]] = (), +    ) -> pagers.ListClustersAsyncPager: +        r"""Lists Clusters in a given project and location. + +        .. code-block:: python + +            # This snippet has been automatically generated and should be regarded as a +            # code template only. +            # It will require modifications to work: +            # - It may require correct/in-range values for request initialization. +            # - It may require specifying regional endpoints when creating the service +            #   client as shown in: +            #   https://googleapis.dev/python/google-api-core/latest/client_options.html +            from google.cloud import alloydb_v1 + +            async def sample_list_clusters(): +                # Create a client +                client = alloydb_v1.AlloyDBAdminAsyncClient() + +                # Initialize request argument(s) +                request = alloydb_v1.ListClustersRequest( +                    parent="parent_value", +                ) + +                # Make the request +                page_result = client.list_clusters(request=request) + +                # Handle the response +                async for response in page_result: +                    print(response) + +        Args: +            request (Optional[Union[google.cloud.alloydb_v1.types.ListClustersRequest, dict]]): +                The request object.
Message for requesting list of + Clusters + parent (:class:`str`): + Required. The name of the parent resource. For the + required format, see the comment on the Cluster.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with the following + format: + + - projects/{project}/locations/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListClustersAsyncPager: + Message for response to listing + Clusters + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListClustersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_cluster( + self, + request: Optional[Union[service.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Gets details of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_get_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.GetClusterRequest, dict]]): + The request object. Message for getting a Cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.types.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cluster( + self, + request: Optional[Union[service.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_create_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.CreateClusterRequest, dict]]): + The request object. Message for creating a Cluster + parent (:class:`str`): + Required. The name of the parent + resource. For the required format, see + the comment on the Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.alloydb_v1.types.Cluster`): + Required. The resource being created + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
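+        # Illustrative values: to_grpc_metadata() folds these routing fields into a +        # single "x-goog-request-params" header entry with a URL-encoded value, e.g. +        #   ("x-goog-request-params", "parent=projects%2Fmy-project%2Flocations%2Fus-central1")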
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_cluster( + self, + request: Optional[Union[service.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_update_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.UpdateClusterRequest, dict]]): + The request object. Message for updating a Cluster + cluster (:class:`google.cloud.alloydb_v1.types.Cluster`): + Required. The resource being updated + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_cluster( + self, + request: Optional[Union[service.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_delete_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.DeleteClusterRequest, dict]]): + The request object. Message for deleting a Cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def restore_cluster( + self, + request: Optional[Union[service.RestoreClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_restore_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup_source = alloydb_v1.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.RestoreClusterRequest, dict]]): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + request = service.RestoreClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restore_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_instances( + self, + request: Optional[Union[service.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_list_instances(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.ListInstancesRequest, dict]]): + The request object. Message for requesting list of + Instances + parent (:class:`str`): + Required. The name of the parent resource. For the + required format, see the comment on the Instance.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with one of the + following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListInstancesAsyncPager: + Message for response to listing + Instances + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
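+        # Illustrative usage: besides `async for item in response`, the returned pager +        # also supports page-level iteration, e.g. +        #   async for page in response.pages: +        #       for instance in page.instances: +        #           ...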
+        return response + +    async def get_instance( +        self, +        request: Optional[Union[service.GetInstanceRequest, dict]] = None, +        *, +        name: Optional[str] = None, +        retry: OptionalRetry = gapic_v1.method.DEFAULT, +        timeout: Union[float, object] = gapic_v1.method.DEFAULT, +        metadata: Sequence[Tuple[str, str]] = (), +    ) -> resources.Instance: +        r"""Gets details of a single Instance. + +        .. code-block:: python + +            # This snippet has been automatically generated and should be regarded as a +            # code template only. +            # It will require modifications to work: +            # - It may require correct/in-range values for request initialization. +            # - It may require specifying regional endpoints when creating the service +            #   client as shown in: +            #   https://googleapis.dev/python/google-api-core/latest/client_options.html +            from google.cloud import alloydb_v1 + +            async def sample_get_instance(): +                # Create a client +                client = alloydb_v1.AlloyDBAdminAsyncClient() + +                # Initialize request argument(s) +                request = alloydb_v1.GetInstanceRequest( +                    name="name_value", +                ) + +                # Make the request +                response = await client.get_instance(request=request) + +                # Handle the response +                print(response) + +        Args: +            request (Optional[Union[google.cloud.alloydb_v1.types.GetInstanceRequest, dict]]): +                The request object. Message for getting an Instance +            name (:class:`str`): +                Required. The name of the resource. +                For the required format, see the comment +                on the Instance.name field. + +                This corresponds to the ``name`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            retry (google.api_core.retry.Retry): Designation of what errors, if any, +                should be retried. +            timeout (float): The timeout for this request. +            metadata (Sequence[Tuple[str, str]]): Strings which should be +                sent along with the request as metadata. + +        Returns: +            google.cloud.alloydb_v1.types.Instance: +                An Instance is a computing unit that +                an end customer can connect to. It's the +                main unit of computing resources in +                AlloyDB. + +        """ +        # Create or coerce a protobuf request object. +        # Quick check: If we got a request object, we should *not* have +        # gotten any keyword arguments that map to the request. +        has_flattened_params = any([name]) +        if request is not None and has_flattened_params: +            raise ValueError( +                "If the `request` argument is set, then none of " +                "the individual field arguments should be set." +            ) + +        request = service.GetInstanceRequest(request) + +        # If we have keyword arguments corresponding to fields on the +        # request, apply these. +        if name is not None: +            request.name = name + +        # Wrap the RPC method; this adds retry and timeout information, +        # and friendly error handling. +        rpc = gapic_v1.method_async.wrap_method( +            self._client._transport.get_instance, +            default_retry=retries.Retry( +                initial=1.0, +                maximum=60.0, +                multiplier=1.3, +                predicate=retries.if_exception_type( +                    core_exceptions.ServiceUnavailable, +                ), +                deadline=60.0, +            ), +            default_timeout=60.0, +            client_info=DEFAULT_CLIENT_INFO, +        ) + +        # Certain fields should be provided within the metadata header; +        # add these here. +        metadata = tuple(metadata) + ( +            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), +        ) + +        # Send the request. +        response = await rpc( +            request, +            retry=retry, +            timeout=timeout, +            metadata=metadata, +        ) + +        # Done; return the response.
+        return response + +    async def create_instance( +        self, +        request: Optional[Union[service.CreateInstanceRequest, dict]] = None, +        *, +        parent: Optional[str] = None, +        instance: Optional[resources.Instance] = None, +        instance_id: Optional[str] = None, +        retry: OptionalRetry = gapic_v1.method.DEFAULT, +        timeout: Union[float, object] = gapic_v1.method.DEFAULT, +        metadata: Sequence[Tuple[str, str]] = (), +    ) -> operation_async.AsyncOperation: +        r"""Creates a new Instance in a given project and +        location. + +        .. code-block:: python + +            # This snippet has been automatically generated and should be regarded as a +            # code template only. +            # It will require modifications to work: +            # - It may require correct/in-range values for request initialization. +            # - It may require specifying regional endpoints when creating the service +            #   client as shown in: +            #   https://googleapis.dev/python/google-api-core/latest/client_options.html +            from google.cloud import alloydb_v1 + +            async def sample_create_instance(): +                # Create a client +                client = alloydb_v1.AlloyDBAdminAsyncClient() + +                # Initialize request argument(s) +                instance = alloydb_v1.Instance() +                instance.instance_type = "SECONDARY" + +                request = alloydb_v1.CreateInstanceRequest( +                    parent="parent_value", +                    instance_id="instance_id_value", +                    instance=instance, +                ) + +                # Make the request +                operation = client.create_instance(request=request) + +                print("Waiting for operation to complete...") + +                response = (await operation).result() + +                # Handle the response +                print(response) + +        Args: +            request (Optional[Union[google.cloud.alloydb_v1.types.CreateInstanceRequest, dict]]): +                The request object. Message for creating an Instance +            parent (:class:`str`): +                Required. The name of the parent +                resource. For the required format, see +                the comment on the Instance.name field. + +                This corresponds to the ``parent`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            instance (:class:`google.cloud.alloydb_v1.types.Instance`): +                Required. The resource being created +                This corresponds to the ``instance`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            instance_id (:class:`str`): +                Required. ID of the requesting +                object. + +                This corresponds to the ``instance_id`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            retry (google.api_core.retry.Retry): Designation of what errors, if any, +                should be retried. +            timeout (float): The timeout for this request. +            metadata (Sequence[Tuple[str, str]]): Strings which should be +                sent along with the request as metadata. + +        Returns: +            google.api_core.operation_async.AsyncOperation: +                An object representing a long-running operation. + +                The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. +                It's the main unit of computing resources in AlloyDB. + +        """ +        # Create or coerce a protobuf request object. +        # Quick check: If we got a request object, we should *not* have +        # gotten any keyword arguments that map to the request. +        has_flattened_params = any([parent, instance, instance_id]) +        if request is not None and has_flattened_params: +            raise ValueError( +                "If the `request` argument is set, then none of " +                "the individual field arguments should be set." +            ) + +        request = service.CreateInstanceRequest(request) + +        # If we have keyword arguments corresponding to fields on the +        # request, apply these.
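+        # Illustrative equivalence (hypothetical values): the flattened arguments are +        # simply copied onto the request, so +        #   await client.create_instance(parent=p, instance=inst, instance_id="i-1") +        # behaves like +        #   await client.create_instance(request={"parent": p, "instance": inst, "instance_id": "i-1"})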
+        if parent is not None: +            request.parent = parent +        if instance is not None: +            request.instance = instance +        if instance_id is not None: +            request.instance_id = instance_id + +        # Wrap the RPC method; this adds retry and timeout information, +        # and friendly error handling. +        rpc = gapic_v1.method_async.wrap_method( +            self._client._transport.create_instance, +            default_timeout=None, +            client_info=DEFAULT_CLIENT_INFO, +        ) + +        # Certain fields should be provided within the metadata header; +        # add these here. +        metadata = tuple(metadata) + ( +            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), +        ) + +        # Send the request. +        response = await rpc( +            request, +            retry=retry, +            timeout=timeout, +            metadata=metadata, +        ) + +        # Wrap the response in an operation future. +        response = operation_async.from_gapic( +            response, +            self._client._transport.operations_client, +            resources.Instance, +            metadata_type=service.OperationMetadata, +        ) + +        # Done; return the response. +        return response + +    async def batch_create_instances( +        self, +        request: Optional[Union[service.BatchCreateInstancesRequest, dict]] = None, +        *, +        retry: OptionalRetry = gapic_v1.method.DEFAULT, +        timeout: Union[float, object] = gapic_v1.method.DEFAULT, +        metadata: Sequence[Tuple[str, str]] = (), +    ) -> operation_async.AsyncOperation: +        r"""Creates new instances under the given project, +        location and cluster. There can be only one primary +        instance in a cluster. If a primary instance already +        exists in the cluster and is also included in this +        request, the API will return an error. +        The primary instance should exist before any read pool +        instance is created. If the primary instance is part +        of the request payload, then the API will take care of +        creating instances in the correct order. This method is +        here to support Google-internal use cases, and is not +        meant for external customers to consume. Please do not +        start relying on it; its behavior is subject to change +        without notice. + +        .. code-block:: python + +            # This snippet has been automatically generated and should be regarded as a +            # code template only. +            # It will require modifications to work: +            # - It may require correct/in-range values for request initialization. +            # - It may require specifying regional endpoints when creating the service +            #   client as shown in: +            #   https://googleapis.dev/python/google-api-core/latest/client_options.html +            from google.cloud import alloydb_v1 + +            async def sample_batch_create_instances(): +                # Create a client +                client = alloydb_v1.AlloyDBAdminAsyncClient() + +                # Initialize request argument(s) +                requests = alloydb_v1.CreateInstanceRequests() +                requests.create_instance_requests.parent = "parent_value" +                requests.create_instance_requests.instance_id = "instance_id_value" +                requests.create_instance_requests.instance.instance_type = "SECONDARY" + +                request = alloydb_v1.BatchCreateInstancesRequest( +                    parent="parent_value", +                    requests=requests, +                ) + +                # Make the request +                operation = client.batch_create_instances(request=request) + +                print("Waiting for operation to complete...") + +                response = (await operation).result() + +                # Handle the response +                print(response) + +        Args: +            request (Optional[Union[google.cloud.alloydb_v1.types.BatchCreateInstancesRequest, dict]]): +                The request object. Message for creating a batch of +                instances under the specified cluster. +            retry (google.api_core.retry.Retry): Designation of what errors, if any, +                should be retried. +            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be +                sent along with the request as metadata. + +        Returns: +            google.api_core.operation_async.AsyncOperation: +                An object representing a long-running operation. + +                The result type for the operation will be +                :class:`google.cloud.alloydb_v1.types.BatchCreateInstancesResponse` +                Message for creating batches of instances in a cluster. + +        """ +        # Create or coerce a protobuf request object. +        request = service.BatchCreateInstancesRequest(request) + +        # Wrap the RPC method; this adds retry and timeout information, +        # and friendly error handling. +        rpc = gapic_v1.method_async.wrap_method( +            self._client._transport.batch_create_instances, +            default_timeout=None, +            client_info=DEFAULT_CLIENT_INFO, +        ) + +        # Certain fields should be provided within the metadata header; +        # add these here. +        metadata = tuple(metadata) + ( +            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), +        ) + +        # Send the request. +        response = await rpc( +            request, +            retry=retry, +            timeout=timeout, +            metadata=metadata, +        ) + +        # Wrap the response in an operation future. +        response = operation_async.from_gapic( +            response, +            self._client._transport.operations_client, +            service.BatchCreateInstancesResponse, +            metadata_type=service.OperationMetadata, +        ) + +        # Done; return the response. +        return response + +    async def update_instance( +        self, +        request: Optional[Union[service.UpdateInstanceRequest, dict]] = None, +        *, +        instance: Optional[resources.Instance] = None, +        update_mask: Optional[field_mask_pb2.FieldMask] = None, +        retry: OptionalRetry = gapic_v1.method.DEFAULT, +        timeout: Union[float, object] = gapic_v1.method.DEFAULT, +        metadata: Sequence[Tuple[str, str]] = (), +    ) -> operation_async.AsyncOperation: +        r"""Updates the parameters of a single Instance. + +        .. code-block:: python + +            # This snippet has been automatically generated and should be regarded as a +            # code template only. +            # It will require modifications to work: +            # - It may require correct/in-range values for request initialization. +            # - It may require specifying regional endpoints when creating the service +            #   client as shown in: +            #   https://googleapis.dev/python/google-api-core/latest/client_options.html +            from google.cloud import alloydb_v1 + +            async def sample_update_instance(): +                # Create a client +                client = alloydb_v1.AlloyDBAdminAsyncClient() + +                # Initialize request argument(s) +                instance = alloydb_v1.Instance() +                instance.instance_type = "SECONDARY" + +                request = alloydb_v1.UpdateInstanceRequest( +                    instance=instance, +                ) + +                # Make the request +                operation = client.update_instance(request=request) + +                print("Waiting for operation to complete...") + +                response = (await operation).result() + +                # Handle the response +                print(response) + +        Args: +            request (Optional[Union[google.cloud.alloydb_v1.types.UpdateInstanceRequest, dict]]): +                The request object. Message for updating an Instance +            instance (:class:`google.cloud.alloydb_v1.types.Instance`): +                Required. The resource being updated +                This corresponds to the ``instance`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): +                Optional. Field mask is used to specify the fields to be +                overwritten in the Instance resource by the update. The +                fields specified in the update_mask are relative to the +                resource, not the full request. A field will be +                overwritten if it is in the mask.
If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance( + self, + request: Optional[Union[service.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1
+
+            async def sample_delete_instance():
+                # Create a client
+                client = alloydb_v1.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1.DeleteInstanceRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.delete_instance(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1.types.DeleteInstanceRequest, dict]]):
+                The request object. Message for deleting an Instance
+            name (:class:`str`):
+                Required. The name of the resource.
+                For the required format, see the comment
+                on the Instance.name field.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                   empty messages in your APIs. A typical example is to
+                   use it as the request or the response type of an API
+                   method. For instance:
+
+                   service Foo {
+                     rpc Bar(google.protobuf.Empty) returns
+                     (google.protobuf.Empty);
+
+                   }
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.DeleteInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_instance,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+ return response + + async def failover_instance( + self, + request: Optional[Union[service.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_failover_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.FailoverInstanceRequest, dict]]): + The request object. Message for triggering failover on an + Instance + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.FailoverInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.failover_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
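+        # Note: awaiting `rpc` yields the raw long-running Operation proto;
+        # it is wrapped into an `operation_async.AsyncOperation` future below
+        # so the caller can await the final Instance result.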
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def restart_instance( + self, + request: Optional[Union[service.RestartInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restart an Instance in a cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_restart_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.RestartInstanceRequest, dict]]): + The request object. + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.RestartInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restart_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
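+        # Concretely, the routing header becomes a metadata entry of the form
+        # ("x-goog-request-params", "name=projects/my-project/...") — the value
+        # shown is a hypothetical example — which the service uses to route
+        # the request to the correct regional backend.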
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backups( + self, + request: Optional[Union[service.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_list_backups(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.ListBackupsRequest, dict]]): + The request object. Message for requesting list of + Backups + parent (:class:`str`): + Required. Parent value for + ListBackupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListBackupsAsyncPager: + Message for response to listing + Backups + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
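+        # The default retry below backs off exponentially: an initial 1.0s
+        # delay grows by a factor of 1.3 per attempt, individual delays are
+        # capped at 60.0s, the overall deadline is 60.0s, and only
+        # ServiceUnavailable errors are retried.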
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: Optional[Union[service.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Gets details of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_get_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.GetBackupRequest, dict]]): + The request object. Message for getting a Backup + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.types.Backup: + Message describing Backup object + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
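+        # GetBackup is a read-only call, so it also carries a default retry
+        # policy (the same exponential backoff used by the list methods).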
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup( + self, + request: Optional[Union[service.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[resources.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Backup in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_create_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.CreateBackupRequest, dict]]): + The request object. Message for creating a Backup + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`google.cloud.alloydb_v1.types.Backup`): + Required. The resource being created + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1.types.Backup` Message + describing Backup object + + """ + # Create or coerce a protobuf request object. 
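+        # Callers may pass either a full request object/dict *or* the
+        # flattened `parent`, `backup`, and `backup_id` arguments; mixing the
+        # two is ambiguous and raises ValueError below.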
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_backup, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_backup( + self, + request: Optional[Union[service.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[resources.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_update_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.UpdateBackupRequest, dict]]): + The request object. Message for updating a Backup + backup (:class:`google.cloud.alloydb_v1.types.Backup`): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. 
The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1.types.Backup` Message + describing Backup object + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_backup, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backup( + self, + request: Optional[Union[service.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + async def sample_delete_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1.types.DeleteBackupRequest, dict]]): + The request object. Message for deleting a Backup + name (:class:`str`): + Required. Name of the resource. For + the required format, see the comment on + the Backup.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_backup, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+        return response
+
+    async def list_supported_database_flags(
+        self,
+        request: Optional[
+            Union[service.ListSupportedDatabaseFlagsRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListSupportedDatabaseFlagsAsyncPager:
+        r"""Lists SupportedDatabaseFlags for a given project and
+        location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1
+
+            async def sample_list_supported_database_flags():
+                # Create a client
+                client = alloydb_v1.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1.ListSupportedDatabaseFlagsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_supported_database_flags(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsRequest, dict]]):
+                The request object. Message for listing the information
+                about the supported Database flags.
+            parent (:class:`str`):
+                Required. The name of the parent resource. The required
+                format is:
+
+                -  projects/{project}/locations/{location}
+
+                Regardless of the parent specified here, as long as it
+                contains a valid project and location, the service will
+                return a static list of supported flags resources. Note
+                that we do not yet support region-specific flags.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsAsyncPager:
+                Message for response to listing
+                SupportedDatabaseFlags.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.ListSupportedDatabaseFlagsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
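+        # As with the other list methods, a conservative default retry is
+        # attached here; an explicit `retry` argument supplied by the caller
+        # takes precedence over this default.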
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_supported_database_flags, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSupportedDatabaseFlagsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
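+        # For example, a dict like {"name": "projects/my-project"} (a
+        # hypothetical value) expands to
+        # locations_pb2.ListLocationsRequest(name="projects/my-project").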
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AlloyDBAdminAsyncClient",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py new file mode 100644 index 000000000000..740b86a05cb4 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py @@ -0,0 +1,3354 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.alloydb_v1.services.alloy_db_admin import pagers +from google.cloud.alloydb_v1.types import resources, service + +from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport +from .transports.grpc import AlloyDBAdminGrpcTransport +from .transports.grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport +from .transports.rest import AlloyDBAdminRestTransport + + +class AlloyDBAdminClientMeta(type): + """Metaclass for the AlloyDBAdmin client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[AlloyDBAdminTransport]] + _transport_registry["grpc"] = AlloyDBAdminGrpcTransport + _transport_registry["grpc_asyncio"] = AlloyDBAdminGrpcAsyncIOTransport + _transport_registry["rest"] = AlloyDBAdminRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AlloyDBAdminTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AlloyDBAdminClient(metaclass=AlloyDBAdminClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
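+
+        For example, "alloydb.googleapis.com" is converted to
+        "alloydb.mtls.googleapis.com".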
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "alloydb.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlloyDBAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlloyDBAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AlloyDBAdminTransport: + """Returns the transport used by the client instance. + + Returns: + AlloyDBAdminTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def backup_path( + project: str, + location: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backups/{backup}".format( + project=project, + location=location, + backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def cluster_path( + project: str, + location: str, + cluster: str, + ) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_version_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + crypto_key_version: str, + ) -> str: + """Returns a fully-qualified crypto_key_version string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + crypto_key_version=crypto_key_version, + ) + + @staticmethod + def parse_crypto_key_version_path(path: str) -> Dict[str, str]: + """Parses a crypto_key_version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)/cryptoKeyVersions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_path( + project: str, + location: str, + cluster: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def supported_database_flag_path( + project: str, + location: str, + flag: str, + ) -> str: + """Returns a fully-qualified supported_database_flag string.""" + return "projects/{project}/locations/{location}/flags/{flag}".format( + project=project, + location=location, + flag=flag, + ) + + @staticmethod + def parse_supported_database_flag_path(path: str) -> Dict[str, str]: + """Parses a supported_database_flag path into its component segments.""" + m = re.match( + 
r"^projects/(?P.+?)/locations/(?P.+?)/flags/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AlloyDBAdminTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the alloy db admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AlloyDBAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AlloyDBAdminTransport): + # transport is a AlloyDBAdminTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_clusters( + self, + request: Optional[Union[service.ListClustersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersPager: + r"""Lists Clusters in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_list_clusters(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.ListClustersRequest, dict]): + The request object. Message for requesting list of + Clusters + parent (str): + Required. The name of the parent resource. 
For the + required format, see the comment on the Cluster.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with the following + format: + + - projects/{project}/locations/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListClustersPager: + Message for response to listing + Clusters + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListClustersRequest): + request = service.ListClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListClustersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cluster( + self, + request: Optional[Union[service.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Gets details of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_get_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.GetClusterRequest, dict]): + The request object. Message for getting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.types.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetClusterRequest): + request = service.GetClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cluster( + self, + request: Optional[Union[service.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_create_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.CreateClusterRequest, dict]): + The request object. Message for creating a Cluster + parent (str): + Required. The name of the parent + resource. For the required format, see + the comment on the Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.alloydb_v1.types.Cluster): + Required. The resource being created + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateClusterRequest): + request = service.CreateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. 
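+        # (to_grpc_metadata() encodes these key/value pairs into the
+        # "x-goog-request-params" header, which the backend uses to route
+        # the request to the right project and region.)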
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_cluster( + self, + request: Optional[Union[service.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_update_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.UpdateClusterRequest, dict]): + The request object. Message for updating a Cluster + cluster (google.cloud.alloydb_v1.types.Cluster): + Required. The resource being updated + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateClusterRequest): + request = service.UpdateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cluster( + self, + request: Optional[Union[service.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_delete_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.DeleteClusterRequest, dict]): + The request object. Message for deleting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteClusterRequest): + request = service.DeleteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def restore_cluster( + self, + request: Optional[Union[service.RestoreClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_restore_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + backup_source = alloydb_v1.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.RestoreClusterRequest, dict]): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.RestoreClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.RestoreClusterRequest): + request = service.RestoreClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_instances( + self, + request: Optional[Union[service.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_list_instances(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.ListInstancesRequest, dict]): + The request object. Message for requesting list of + Instances + parent (str): + Required. The name of the parent resource. For the + required format, see the comment on the Instance.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with one of the + following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListInstancesPager: + Message for response to listing + Instances + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListInstancesRequest): + request = service.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
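+        # (Iterating past the first page triggers further ListInstances
+        # RPCs lazily; the caller does not need to handle pagination
+        # itself.)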
+ return response + + def get_instance( + self, + request: Optional[Union[service.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Instance: + r"""Gets details of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_get_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.GetInstanceRequest, dict]): + The request object. Message for getting a Instance + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.types.Instance: + An Instance is a computing unit that + an end customer can connect to. It's the + main unit of computing resources in + AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetInstanceRequest): + request = service.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
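+        # (This is a plain unary call, so the Instance message is returned
+        # directly; there is no long-running operation to await.)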
+ return response + + def create_instance( + self, + request: Optional[Union[service.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Instance in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_create_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.CreateInstanceRequest, dict]): + The request object. Message for creating a Instance + parent (str): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.alloydb_v1.types.Instance): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
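+        # (The constructor below also accepts a plain dict, which
+        # proto-plus converts into a new CreateInstanceRequest.)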
+ if not isinstance(request, service.CreateInstanceRequest): + request = service.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def batch_create_instances( + self, + request: Optional[Union[service.BatchCreateInstancesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates new instances under the given project, + location and cluster. There can be only one primary + instance in a cluster. If the primary instance exists in + the cluster as well as this request, then API will throw + an error. + The primary instance should exist before any read pool + instance is created. If the primary instance is a part + of the request payload, then the API will take care of + creating instances in the correct order. This method is + here to support Google-internal use cases, and is not + meant for external customers to consume. Please do not + start relying on it; its behavior is subject to change + without notice. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_batch_create_instances(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + requests = alloydb_v1.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.BatchCreateInstancesRequest, dict]): + The request object. Message for creating a batch of + instances under the specified cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1.types.BatchCreateInstancesResponse` + Message for creating batches of instances in a cluster. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.BatchCreateInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.BatchCreateInstancesRequest): + request = service.BatchCreateInstancesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + service.BatchCreateInstancesResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance( + self, + request: Optional[Union[service.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[resources.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_update_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.UpdateInstanceRequest, dict]): + The request object. Message for updating a Instance + instance (google.cloud.alloydb_v1.types.Instance): + Required. The resource being updated + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Instance resource by the update. 
The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateInstanceRequest): + request = service.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance( + self, + request: Optional[Union[service.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_delete_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.DeleteInstanceRequest, dict]): + The request object. Message for deleting a Instance + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteInstanceRequest): + request = service.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
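+        # (Calling result() on the returned operation yields
+        # empty_pb2.Empty once the deletion completes; progress is
+        # reported through service.OperationMetadata.)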
+ return response + + def failover_instance( + self, + request: Optional[Union[service.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_failover_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.FailoverInstanceRequest, dict]): + The request object. Message for triggering failover on an + Instance + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.FailoverInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.FailoverInstanceRequest): + request = service.FailoverInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.failover_instance] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def restart_instance( + self, + request: Optional[Union[service.RestartInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restart an Instance in a cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_restart_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.RestartInstanceRequest, dict]): + The request object. + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.RestartInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.RestartInstanceRequest): + request = service.RestartInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
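+        # For illustration (hypothetical name): the flattened and request-object
+        # call styles are equivalent; ``name=...`` just fills the same field:
+        #
+        #     client.restart_instance(name="projects/p/.../instances/i")
+        #     # behaves like:
+        #     client.restart_instance(
+        #         request=alloydb_v1.RestartInstanceRequest(name="projects/p/.../instances/i")
+        #     )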
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restart_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backups( + self, + request: Optional[Union[service.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_list_backups(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.ListBackupsRequest, dict]): + The request object. Message for requesting list of + Backups + parent (str): + Required. Parent value for + ListBackupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListBackupsPager: + Message for response to listing + Backups + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListBackupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
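+        # For illustration (hypothetical parent): because of this coercion a
+        # plain dict also works and is converted to the proto-plus request type:
+        #
+        #     client.list_backups(request={"parent": "projects/p/locations/l"})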
+ if not isinstance(request, service.ListBackupsRequest): + request = service.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup( + self, + request: Optional[Union[service.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Gets details of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_get_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.GetBackupRequest, dict]): + The request object. Message for getting a Backup + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1.types.Backup: + Message describing Backup object + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
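+        # For illustration: the quick check above rejects mixing the two call
+        # styles; passing both ``request`` and ``name`` raises ValueError:
+        #
+        #     client.get_backup(
+        #         request=alloydb_v1.GetBackupRequest(name="n"),
+        #         name="n",  # ValueError: mutually exclusive with `request`
+        #     )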
+ if not isinstance(request, service.GetBackupRequest): + request = service.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_backup( + self, + request: Optional[Union[service.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[resources.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Backup in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_create_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.CreateBackupRequest, dict]): + The request object. Message for creating a Backup + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (google.cloud.alloydb_v1.types.Backup): + Required. The resource being created + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1.types.Backup` Message + describing Backup object + + """ + # Create or coerce a protobuf request object. 
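+        # For illustration (hypothetical names): the flattened form populates
+        # parent, backup and backup_id on a single CreateBackupRequest:
+        #
+        #     backup = alloydb_v1.Backup(cluster_name="projects/p/locations/l/clusters/c")
+        #     op = client.create_backup(
+        #         parent="projects/p/locations/l",
+        #         backup=backup,
+        #         backup_id="my-backup",
+        #     )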
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateBackupRequest): + request = service.CreateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_backup( + self, + request: Optional[Union[service.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[resources.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_update_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.UpdateBackupRequest, dict]): + The request object. Message for updating a Backup + backup (google.cloud.alloydb_v1.types.Backup): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. 
Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1.types.Backup` Message + describing Backup object + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateBackupRequest): + request = service.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup( + self, + request: Optional[Union[service.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1 + + def sample_delete_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1.types.DeleteBackupRequest, dict]): + The request object. Message for deleting a Backup + name (str): + Required. Name of the resource. For + the required format, see the comment on + the Backup.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteBackupRequest): + request = service.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
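+        # For illustration (hypothetical name): the returned future resolves to
+        # ``empty_pb2.Empty``, so callers usually just wait on it:
+        #
+        #     client.delete_backup(name="projects/p/locations/l/backups/b").result()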
+        return response
+
+    def list_supported_database_flags(
+        self,
+        request: Optional[
+            Union[service.ListSupportedDatabaseFlagsRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListSupportedDatabaseFlagsPager:
+        r"""Lists SupportedDatabaseFlags for a given project and
+        location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1
+
+            def sample_list_supported_database_flags():
+                # Create a client
+                client = alloydb_v1.AlloyDBAdminClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1.ListSupportedDatabaseFlagsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_supported_database_flags(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsRequest, dict]):
+                The request object. Message for listing the information
+                about the supported Database flags.
+            parent (str):
+                Required. The name of the parent resource. The required
+                format is:
+
+                - projects/{project}/locations/{location}
+
+                Regardless of the parent specified here, as long as it
+                contains a valid project and location, the service will
+                return a static list of supported flag resources. Note
+                that we do not yet support region-specific flags.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsPager:
+                Message for response to listing
+                SupportedDatabaseFlags.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a service.ListSupportedDatabaseFlagsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, service.ListSupportedDatabaseFlagsRequest):
+            request = service.ListSupportedDatabaseFlagsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
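+        # For illustration (hypothetical parent): the pager returned below hides
+        # pagination; iterate items directly, or walk raw pages via ``.pages``:
+        #
+        #     for flag in client.list_supported_database_flags(parent=parent):
+        #         print(flag.name)
+        #     # or, page by page:
+        #     for page in client.list_supported_database_flags(parent=parent).pages:
+        #         print(len(page.supported_database_flags))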
+ rpc = self._transport._wrapped_methods[ + self._transport.list_supported_database_flags + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSupportedDatabaseFlagsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AlloyDBAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
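+        # For illustration: unlike the proto-plus AlloyDB requests above, this
+        # is a raw protobuf type, so a dict is expanded as keyword arguments:
+        #
+        #     client.list_locations(request={"name": "projects/my-project"})
+        #     # -> locations_pb2.ListLocationsRequest(name="projects/my-project")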
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AlloyDBAdminClient",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/pagers.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/pagers.py new file mode 100644 index 000000000000..bc0f16a79f03 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/pagers.py @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.alloydb_v1.types import resources, service + + +class ListClustersPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListClustersResponse], + request: service.ListClustersRequest, + response: service.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListClustersRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
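+
+            For illustration (hypothetical parent; a pager is normally obtained
+            from ``AlloyDBAdminClient.list_clusters`` rather than constructed
+            directly)::
+
+                for cluster in client.list_clusters(parent="projects/p/locations/l"):
+                    print(cluster.name)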
+ """ + self._method = method + self._request = service.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Cluster]: + for page in self.pages: + yield from page.clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListClustersAsyncPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListClustersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListClustersResponse]], + request: service.ListClustersRequest, + response: service.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListClustersRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Cluster]: + async def async_generator(): + async for page in self.pages: + for response in page.clusters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListInstancesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListInstancesResponse], + request: service.ListInstancesRequest, + response: service.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListInstancesResponse]], + request: service.ListInstancesRequest, + response: service.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
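+
+            For illustration (hypothetical parent; an async pager is normally
+            obtained by awaiting the async client's ``list_instances``)::
+
+                pager = await client.list_instances(parent="projects/p/locations/l/clusters/c")
+                async for instance in pager:
+                    print(instance.name)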
+ """ + self._method = method + self._request = service.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListBackupsResponse], + request: service.ListBackupsRequest, + response: service.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListBackupsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListBackupsResponse]], + request: service.ListBackupsRequest, + response: service.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSupportedDatabaseFlagsPager: + """A pager for iterating through ``list_supported_database_flags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``supported_database_flags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSupportedDatabaseFlags`` requests and continue to iterate + through the ``supported_database_flags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListSupportedDatabaseFlagsResponse], + request: service.ListSupportedDatabaseFlagsRequest, + response: service.ListSupportedDatabaseFlagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListSupportedDatabaseFlagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSupportedDatabaseFlagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.SupportedDatabaseFlag]: + for page in self.pages: + yield from page.supported_database_flags + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSupportedDatabaseFlagsAsyncPager: + """A pager for iterating through ``list_supported_database_flags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``supported_database_flags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSupportedDatabaseFlags`` requests and continue to iterate + through the ``supported_database_flags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListSupportedDatabaseFlagsResponse]], + request: service.ListSupportedDatabaseFlagsRequest, + response: service.ListSupportedDatabaseFlagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsRequest): + The initial request object. + response (google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListSupportedDatabaseFlagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListSupportedDatabaseFlagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.SupportedDatabaseFlag]: + async def async_generator(): + async for page in self.pages: + for response in page.supported_database_flags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/__init__.py new file mode 100644 index 000000000000..1e347e0f52b8 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AlloyDBAdminTransport +from .grpc import AlloyDBAdminGrpcTransport +from .grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport +from .rest import AlloyDBAdminRestInterceptor, AlloyDBAdminRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AlloyDBAdminTransport]] +_transport_registry["grpc"] = AlloyDBAdminGrpcTransport +_transport_registry["grpc_asyncio"] = AlloyDBAdminGrpcAsyncIOTransport +_transport_registry["rest"] = AlloyDBAdminRestTransport + +__all__ = ( + "AlloyDBAdminTransport", + "AlloyDBAdminGrpcTransport", + "AlloyDBAdminGrpcAsyncIOTransport", + "AlloyDBAdminRestTransport", + "AlloyDBAdminRestInterceptor", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/base.py new file mode 100644 index 000000000000..ccbdda3787e6 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/base.py @@ -0,0 +1,549 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1 import gapic_version as package_version +from google.cloud.alloydb_v1.types import resources, service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AlloyDBAdminTransport(abc.ABC): + """Abstract transport class for AlloyDBAdmin.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "alloydb.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
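+        # Credential resolution, as implemented below: an explicitly supplied
+        # ``credentials`` object is used as-is, a ``credentials_file`` is
+        # loaded via google.auth, and otherwise application default
+        # credentials are looked up. Supplying both explicit forms raises
+        # DuplicateCredentialArgs.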
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_clusters: gapic_v1.method.wrap_method( + self.list_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method.wrap_method( + self.get_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method.wrap_method( + self.create_cluster, + default_timeout=None, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method.wrap_method( + self.update_cluster, + default_timeout=None, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method.wrap_method( + self.delete_cluster, + default_timeout=None, + client_info=client_info, + ), + self.restore_cluster: gapic_v1.method.wrap_method( + self.restore_cluster, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_instances: gapic_v1.method.wrap_method( + self.batch_create_instances, + default_timeout=None, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + 
self.delete_instance, + default_timeout=None, + client_info=client_info, + ), + self.failover_instance: gapic_v1.method.wrap_method( + self.failover_instance, + default_timeout=None, + client_info=client_info, + ), + self.restart_instance: gapic_v1.method.wrap_method( + self.restart_instance, + default_timeout=None, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method.wrap_method( + self.create_backup, + default_timeout=None, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.list_supported_database_flags: gapic_v1.method.wrap_method( + self.list_supported_database_flags, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
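+
+        A typical pattern (an illustrative sketch; assumes the public
+        ``AlloyDBAdminClient`` wrapper, which closes its transport when used
+        as a context manager)::
+
+            with AlloyDBAdminClient() as client:
+                ...  # transport resources are released when the block exits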
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_clusters( + self, + ) -> Callable[ + [service.ListClustersRequest], + Union[service.ListClustersResponse, Awaitable[service.ListClustersResponse]], + ]: + raise NotImplementedError() + + @property + def get_cluster( + self, + ) -> Callable[ + [service.GetClusterRequest], + Union[resources.Cluster, Awaitable[resources.Cluster]], + ]: + raise NotImplementedError() + + @property + def create_cluster( + self, + ) -> Callable[ + [service.CreateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_cluster( + self, + ) -> Callable[ + [service.UpdateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cluster( + self, + ) -> Callable[ + [service.DeleteClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_cluster( + self, + ) -> Callable[ + [service.RestoreClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], + Union[service.ListInstancesResponse, Awaitable[service.ListInstancesResponse]], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> Callable[ + [service.GetInstanceRequest], + Union[resources.Instance, Awaitable[resources.Instance]], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> Callable[ + [service.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def batch_create_instances( + self, + ) -> Callable[ + [service.BatchCreateInstancesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_instance( + self, + ) -> Callable[ + [service.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> Callable[ + [service.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def failover_instance( + self, + ) -> Callable[ + [service.FailoverInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restart_instance( + self, + ) -> Callable[ + [service.RestartInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [service.ListBackupsRequest], + Union[service.ListBackupsResponse, Awaitable[service.ListBackupsResponse]], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [service.GetBackupRequest], Union[resources.Backup, Awaitable[resources.Backup]] + ]: + raise NotImplementedError() + + @property + def create_backup( + self, + ) -> Callable[ + [service.CreateBackupRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [service.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [service.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + Union[ + service.ListSupportedDatabaseFlagsResponse, + Awaitable[service.ListSupportedDatabaseFlagsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AlloyDBAdminTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc.py new file mode 100644 index 000000000000..e7f6c3c46691 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc.py @@ -0,0 +1,906 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.alloydb_v1.types import resources, service
+
+from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
+
+
+class AlloyDBAdminGrpcTransport(AlloyDBAdminTransport):
+    """gRPC backend transport for AlloyDBAdmin.
+
+    Service describing handlers for resources
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "alloydb.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. 
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def list_clusters(
+        self,
+    ) -> Callable[[service.ListClustersRequest], service.ListClustersResponse]:
+        r"""Return a callable for the list clusters method over gRPC.
+
+        Lists Clusters in a given project and location.
+
+        Returns:
+            Callable[[~.ListClustersRequest],
+                    ~.ListClustersResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_clusters" not in self._stubs:
+            self._stubs["list_clusters"] = self.grpc_channel.unary_unary(
+                "/google.cloud.alloydb.v1.AlloyDBAdmin/ListClusters",
+                request_serializer=service.ListClustersRequest.serialize,
+                response_deserializer=service.ListClustersResponse.deserialize,
+            )
+        return self._stubs["list_clusters"]
+
+    @property
+    def get_cluster(self) -> Callable[[service.GetClusterRequest], resources.Cluster]:
+        r"""Return a callable for the get cluster method over gRPC.
+
+        Gets details of a single Cluster.
+
+        Returns:
+            Callable[[~.GetClusterRequest],
+                    ~.Cluster]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
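+        # The callable is created once and cached in ``self._stubs``, so
+        # repeated property accesses reuse the same stub.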
+ if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/GetCluster", + request_serializer=service.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new Cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/CreateCluster", + request_serializer=service.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the parameters of a single Cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateCluster", + request_serializer=service.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single Cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteCluster", + request_serializer=service.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the restore cluster method over gRPC. + + Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + Returns: + Callable[[~.RestoreClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
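+
+        Note that this raw callable returns a ``google.longrunning.Operation``
+        message; the public client layer wraps it in an operation future that
+        can be polled until the restore completes.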
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_cluster" not in self._stubs: + self._stubs["restore_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/RestoreCluster", + request_serializer=service.RestoreClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_cluster"] + + @property + def list_instances( + self, + ) -> Callable[[service.ListInstancesRequest], service.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], resources.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/GetInstance", + request_serializer=service.GetInstanceRequest.serialize, + response_deserializer=resources.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/CreateInstance", + request_serializer=service.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def batch_create_instances( + self, + ) -> Callable[[service.BatchCreateInstancesRequest], operations_pb2.Operation]: + r"""Return a callable for the batch create instances method over gRPC. + + Creates new instances under the given project, + location and cluster. 
There can be only one primary
+        instance in a cluster. If a primary instance already
+        exists in the cluster and this request also includes
+        one, the API will return an error.
+        The primary instance should exist before any read pool
+        instance is created. If the primary instance is a part
+        of the request payload, then the API will take care of
+        creating instances in the correct order. This method is
+        here to support Google-internal use cases, and is not
+        meant for external customers to consume. Please do not
+        start relying on it; its behavior is subject to change
+        without notice.
+
+        Returns:
+            Callable[[~.BatchCreateInstancesRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "batch_create_instances" not in self._stubs:
+            self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary(
+                "/google.cloud.alloydb.v1.AlloyDBAdmin/BatchCreateInstances",
+                request_serializer=service.BatchCreateInstancesRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["batch_create_instances"]
+
+    @property
+    def update_instance(
+        self,
+    ) -> Callable[[service.UpdateInstanceRequest], operations_pb2.Operation]:
+        r"""Return a callable for the update instance method over gRPC.
+
+        Updates the parameters of a single Instance.
+
+        Returns:
+            Callable[[~.UpdateInstanceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_instance" not in self._stubs:
+            self._stubs["update_instance"] = self.grpc_channel.unary_unary(
+                "/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateInstance",
+                request_serializer=service.UpdateInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["update_instance"]
+
+    @property
+    def delete_instance(
+        self,
+    ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]:
+        r"""Return a callable for the delete instance method over gRPC.
+
+        Deletes a single Instance.
+
+        Returns:
+            Callable[[~.DeleteInstanceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_instance" not in self._stubs:
+            self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
+                "/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteInstance",
+                request_serializer=service.DeleteInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["delete_instance"]
+
+    @property
+    def failover_instance(
+        self,
+    ) -> Callable[[service.FailoverInstanceRequest], operations_pb2.Operation]:
+        r"""Return a callable for the failover instance method over gRPC.
+
+        Forces a Failover for a highly available instance.
+        Failover promotes the HA standby instance as the new
+        primary. Imperative only.
+ + Returns: + Callable[[~.FailoverInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/FailoverInstance", + request_serializer=service.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["failover_instance"] + + @property + def restart_instance( + self, + ) -> Callable[[service.RestartInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the restart instance method over gRPC. + + Restart an Instance in a cluster. + Imperative only. + + Returns: + Callable[[~.RestartInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restart_instance" not in self._stubs: + self._stubs["restart_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/RestartInstance", + request_serializer=service.RestartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restart_instance"] + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], service.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/ListBackups", + request_serializer=service.ListBackupsRequest.serialize, + response_deserializer=service.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup(self) -> Callable[[service.GetBackupRequest], resources.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single Backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/GetBackup", + request_serializer=service.GetBackupRequest.serialize, + response_deserializer=resources.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup method over gRPC. 
+ + Creates a new Backup in a given project and location. + + Returns: + Callable[[~.CreateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/CreateBackup", + request_serializer=service.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Updates the parameters of a single Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateBackup", + request_serializer=service.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteBackup", + request_serializer=service.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + service.ListSupportedDatabaseFlagsResponse, + ]: + r"""Return a callable for the list supported database flags method over gRPC. + + Lists SupportedDatabaseFlags for a given project and + location. + + Returns: + Callable[[~.ListSupportedDatabaseFlagsRequest], + ~.ListSupportedDatabaseFlagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
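+        # This callable backs the ListSupportedDatabaseFlags pagers defined
+        # earlier in this package.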
+ if "list_supported_database_flags" not in self._stubs: + self._stubs[ + "list_supported_database_flags" + ] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/ListSupportedDatabaseFlags", + request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize, + response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize, + ) + return self._stubs["list_supported_database_flags"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
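+        # ``ListOperations`` is served by the google.longrunning.Operations
+        # mixin rather than by the AlloyDBAdmin service itself.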
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("AlloyDBAdminGrpcTransport",)
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..6de9df150339
--- /dev/null
+++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc_asyncio.py
@@ -0,0 +1,921 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.alloydb_v1.types import resources, service
+
+from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
+from .grpc import AlloyDBAdminGrpcTransport
+
+
+class AlloyDBAdminGrpcAsyncIOTransport(AlloyDBAdminTransport):
+    """gRPC AsyncIO backend transport for AlloyDBAdmin.
+
+    Service describing handlers for resources
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
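+
+        Example (an illustrative sketch; assumes application default
+        credentials are available in the environment)::
+
+            channel = AlloyDBAdminGrpcAsyncIOTransport.create_channel(
+                host="alloydb.googleapis.com",
+            )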
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
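+
+        Example (an illustrative sketch; ``AlloyDBAdminAsyncClient`` is the
+        public wrapper class and the host shown is the default)::
+
+            transport = AlloyDBAdminGrpcAsyncIOTransport(
+                host="alloydb.googleapis.com",
+            )
+            client = AlloyDBAdminAsyncClient(transport=transport)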
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_clusters( + self, + ) -> Callable[ + [service.ListClustersRequest], Awaitable[service.ListClustersResponse] + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists Clusters in a given project and location. + + Returns: + Callable[[~.ListClustersRequest], + Awaitable[~.ListClustersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/ListClusters", + request_serializer=service.ListClustersRequest.serialize, + response_deserializer=service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[[service.GetClusterRequest], Awaitable[resources.Cluster]]: + r"""Return a callable for the get cluster method over gRPC. + + Gets details of a single Cluster. + + Returns: + Callable[[~.GetClusterRequest], + Awaitable[~.Cluster]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/GetCluster", + request_serializer=service.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new Cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/CreateCluster", + request_serializer=service.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the parameters of a single Cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateCluster", + request_serializer=service.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single Cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
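+
+        Example:
+            A hypothetical sketch of awaiting the returned callable (the
+            resource name is illustrative):
+
+            .. code-block:: python
+
+                request = service.DeleteClusterRequest(
+                    name="projects/my-project/locations/us-central1/clusters/my-cluster",
+                )
+                operation = await transport.delete_cluster(request)
+                # ``operation`` is a raw longrunning Operation; poll it
+                # via ``transport.operations_client`` if needed.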
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteCluster", + request_serializer=service.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restore cluster method over gRPC. + + Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + Returns: + Callable[[~.RestoreClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_cluster" not in self._stubs: + self._stubs["restore_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/RestoreCluster", + request_serializer=service.RestoreClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_cluster"] + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], Awaitable[service.ListInstancesResponse] + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], Awaitable[resources.Instance]]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/GetInstance", + request_serializer=service.GetInstanceRequest.serialize, + response_deserializer=resources.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/CreateInstance", + request_serializer=service.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def batch_create_instances( + self, + ) -> Callable[ + [service.BatchCreateInstancesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the batch create instances method over gRPC. + + Creates new instances under the given project, + location and cluster. There can be only one primary + instance in a cluster. If the primary instance exists in + the cluster as well as this request, then API will throw + an error. + The primary instance should exist before any read pool + instance is created. If the primary instance is a part + of the request payload, then the API will take care of + creating instances in the correct order. This method is + here to support Google-internal use cases, and is not + meant for external customers to consume. Please do not + start relying on it; its behavior is subject to change + without notice. + + Returns: + Callable[[~.BatchCreateInstancesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_instances" not in self._stubs: + self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/BatchCreateInstances", + request_serializer=service.BatchCreateInstancesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_instances"] + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single Instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateInstance", + request_serializer=service.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteInstance", + request_serializer=service.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def failover_instance( + self, + ) -> Callable[ + [service.FailoverInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the failover instance method over gRPC. + + Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + Returns: + Callable[[~.FailoverInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/FailoverInstance", + request_serializer=service.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["failover_instance"] + + @property + def restart_instance( + self, + ) -> Callable[ + [service.RestartInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restart instance method over gRPC. + + Restart an Instance in a cluster. + Imperative only. + + Returns: + Callable[[~.RestartInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restart_instance" not in self._stubs: + self._stubs["restart_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/RestartInstance", + request_serializer=service.RestartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restart_instance"] + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], Awaitable[service.ListBackupsResponse]]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. 
+ + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/ListBackups", + request_serializer=service.ListBackupsRequest.serialize, + response_deserializer=service.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[service.GetBackupRequest], Awaitable[resources.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single Backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/GetBackup", + request_serializer=service.GetBackupRequest.serialize, + response_deserializer=resources.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create backup method over gRPC. + + Creates a new Backup in a given project and location. + + Returns: + Callable[[~.CreateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/CreateBackup", + request_serializer=service.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update backup method over gRPC. + + Updates the parameters of a single Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateBackup", + request_serializer=service.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteBackup", + request_serializer=service.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + Awaitable[service.ListSupportedDatabaseFlagsResponse], + ]: + r"""Return a callable for the list supported database flags method over gRPC. + + Lists SupportedDatabaseFlags for a given project and + location. + + Returns: + Callable[[~.ListSupportedDatabaseFlagsRequest], + Awaitable[~.ListSupportedDatabaseFlagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_supported_database_flags" not in self._stubs: + self._stubs[ + "list_supported_database_flags" + ] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1.AlloyDBAdmin/ListSupportedDatabaseFlags", + request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize, + response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize, + ) + return self._stubs["list_supported_database_flags"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("AlloyDBAdminGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py new file mode 100644 index 000000000000..5c28e0d9b423 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py @@ -0,0 +1,3387 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.alloydb_v1.types import resources, service + +from .base import AlloyDBAdminTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AlloyDBAdminRestInterceptor: + """Interceptor for AlloyDBAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AlloyDBAdminRestTransport. + + .. 
code-block:: python + class MyCustomAlloyDBAdminInterceptor(AlloyDBAdminRestInterceptor): + def pre_batch_create_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_failover_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_failover_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_supported_database_flags(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_list_supported_database_flags(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restart_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restart_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AlloyDBAdminRestTransport(interceptor=MyCustomAlloyDBAdminInterceptor()) + client = AlloyDBAdminClient(transport=transport) + + + """ + + def pre_batch_create_instances( + self, + request: service.BatchCreateInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.BatchCreateInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_create_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_batch_create_instances( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_create_instances + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_backup( + self, request: service.CreateBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_cluster( + self, request: service.CreateClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
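+
+        For example, a subclass could attach extra request metadata
+        (a sketch; the header name is purely illustrative):
+
+        .. code-block:: python
+
+            def pre_create_cluster(self, request, metadata):
+                metadata = list(metadata) + [("x-my-audit-tag", "create")]
+                return request, metadata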
+ """ + return request, metadata + + def post_create_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_instance( + self, + request: service.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_backup( + self, request: service.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_cluster( + self, request: service.DeleteClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_instance( + self, + request: service.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_failover_instance( + self, + request: service.FailoverInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for failover_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_failover_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for failover_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_backup( + self, request: service.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_backup(self, response: resources.Backup) -> resources.Backup: + """Post-rpc interceptor for get_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_cluster( + self, request: service.GetClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: + """Post-rpc interceptor for get_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_instance( + self, request: service.GetInstanceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_instance(self, response: resources.Instance) -> resources.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_backups( + self, request: service.ListBackupsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_backups( + self, response: service.ListBackupsResponse + ) -> service.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_clusters( + self, request: service.ListClustersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_list_clusters( + self, response: service.ListClustersResponse + ) -> service.ListClustersResponse: + """Post-rpc interceptor for list_clusters + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_instances( + self, request: service.ListInstancesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_instances( + self, response: service.ListInstancesResponse + ) -> service.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_supported_database_flags( + self, + request: service.ListSupportedDatabaseFlagsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListSupportedDatabaseFlagsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_supported_database_flags + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_supported_database_flags( + self, response: service.ListSupportedDatabaseFlagsResponse + ) -> service.ListSupportedDatabaseFlagsResponse: + """Post-rpc interceptor for list_supported_database_flags + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_restart_instance( + self, + request: service.RestartInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RestartInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restart_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_restart_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restart_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_restore_cluster( + self, + request: service.RestoreClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RestoreClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_restore_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_update_backup( + self, request: service.UpdateBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_cluster( + self, request: service.UpdateClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_update_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_instance( + self, + request: service.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AlloyDBAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AlloyDBAdminRestInterceptor + + +class AlloyDBAdminRestTransport(AlloyDBAdminTransport): + """REST backend transport for AlloyDBAdmin. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[AlloyDBAdminRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AlloyDBAdminRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _BatchCreateInstances(AlloyDBAdminRestStub): + def __hash__(self): + return hash("BatchCreateInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.BatchCreateInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch create instances method over HTTP. + + Args: + request (~.service.BatchCreateInstancesRequest): + The request object. Message for creating a batch of + instances under the specified cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
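+
+            A sketch of the transcoding step this method performs (the
+            parent path is hypothetical):
+
+            .. code-block:: python
+
+                transcoded = path_template.transcode(http_options, pb_request)
+                # With parent="projects/p/locations/l/clusters/c" this yields
+                # uri="/v1/projects/p/locations/l/clusters/c/instances:batchCreate"
+                # and method="post"; the ``requests`` field becomes the JSON body.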
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/instances:batchCreate", + "body": "requests", + }, + ] + request, metadata = self._interceptor.pre_batch_create_instances( + request, metadata + ) + pb_request = service.BatchCreateInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_instances(resp) + return resp + + class _CreateBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup method over HTTP. + + Args: + request (~.service.CreateBackupRequest): + The request object. Message for creating a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backups", + "body": "backup", + }, + ] + request, metadata = self._interceptor.pre_create_backup(request, metadata) + pb_request = service.CreateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup(resp) + return resp + + class _CreateCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "clusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cluster method over HTTP. + + Args: + request (~.service.CreateClusterRequest): + The request object. Message for creating a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ 
+ """ 
+ 
+ http_options: List[Dict[str, str]] = [ 
+ { 
+ "method": "post", 
+ "uri": "/v1/{parent=projects/*/locations/*}/clusters", 
+ "body": "cluster", 
+ }, 
+ ] 
+ request, metadata = self._interceptor.pre_create_cluster(request, metadata) 
+ pb_request = service.CreateClusterRequest.pb(request) 
+ transcoded_request = path_template.transcode(http_options, pb_request) 
+ 
+ # Jsonify the request body 
+ 
+ body = json_format.MessageToJson( 
+ transcoded_request["body"], 
+ including_default_value_fields=False, 
+ use_integers_for_enums=True, 
+ ) 
+ uri = transcoded_request["uri"] 
+ method = transcoded_request["method"] 
+ 
+ # Jsonify the query params 
+ query_params = json.loads( 
+ json_format.MessageToJson( 
+ transcoded_request["query_params"], 
+ including_default_value_fields=False, 
+ use_integers_for_enums=True, 
+ ) 
+ ) 
+ query_params.update(self._get_unset_required_fields(query_params)) 
+ 
+ query_params["$alt"] = "json;enum-encoding=int" 
+ 
+ # Send the request 
+ headers = dict(metadata) 
+ headers["Content-Type"] = "application/json" 
+ response = getattr(self._session, method)( 
+ "{host}{uri}".format(host=self._host, uri=uri), 
+ timeout=timeout, 
+ headers=headers, 
+ params=rest_helpers.flatten_query_params(query_params, strict=True), 
+ data=body, 
+ ) 
+ 
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception 
+ # subclass. 
+ if response.status_code >= 400: 
+ raise core_exceptions.from_http_response(response) 
+ 
+ # Return the response 
+ resp = operations_pb2.Operation() 
+ json_format.Parse(response.content, resp, ignore_unknown_fields=True) 
+ resp = self._interceptor.post_create_cluster(resp) 
+ return resp 
+ 
+ class _CreateInstance(AlloyDBAdminRestStub): 
+ def __hash__(self): 
+ return hash("CreateInstance") 
+ 
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { 
+ "instanceId": "", 
+ } 
+ 
+ @classmethod 
+ def _get_unset_required_fields(cls, message_dict): 
+ return { 
+ k: v 
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() 
+ if k not in message_dict 
+ } 
+ 
+ def __call__( 
+ self, 
+ request: service.CreateInstanceRequest, 
+ *, 
+ retry: OptionalRetry = gapic_v1.method.DEFAULT, 
+ timeout: Optional[float] = None, 
+ metadata: Sequence[Tuple[str, str]] = (), 
+ ) -> operations_pb2.Operation: 
+ r"""Call the create instance method over HTTP. 
+ 
+ Args: 
+ request (~.service.CreateInstanceRequest): 
+ The request object. Message for creating an Instance 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, 
+ should be retried. 
+ timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be 
+ sent along with the request as metadata. 
+ 
+ Returns: 
+ ~.operations_pb2.Operation: 
+ This resource represents a 
+ long-running operation that is the 
+ result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/instances", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = service.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("DeleteBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.service.DeleteBackupRequest): + The request object. Message for deleting a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = service.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup(resp) + return resp + + class _DeleteCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("DeleteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cluster method over HTTP. + + Args: + request (~.service.DeleteClusterRequest): + The request object. Message for deleting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ 
+ """ 
+ 
+ http_options: List[Dict[str, str]] = [ 
+ { 
+ "method": "delete", 
+ "uri": "/v1/{name=projects/*/locations/*/clusters/*}", 
+ }, 
+ ] 
+ request, metadata = self._interceptor.pre_delete_cluster(request, metadata) 
+ pb_request = service.DeleteClusterRequest.pb(request) 
+ transcoded_request = path_template.transcode(http_options, pb_request) 
+ 
+ uri = transcoded_request["uri"] 
+ method = transcoded_request["method"] 
+ 
+ # Jsonify the query params 
+ query_params = json.loads( 
+ json_format.MessageToJson( 
+ transcoded_request["query_params"], 
+ including_default_value_fields=False, 
+ use_integers_for_enums=True, 
+ ) 
+ ) 
+ query_params.update(self._get_unset_required_fields(query_params)) 
+ 
+ query_params["$alt"] = "json;enum-encoding=int" 
+ 
+ # Send the request 
+ headers = dict(metadata) 
+ headers["Content-Type"] = "application/json" 
+ response = getattr(self._session, method)( 
+ "{host}{uri}".format(host=self._host, uri=uri), 
+ timeout=timeout, 
+ headers=headers, 
+ params=rest_helpers.flatten_query_params(query_params, strict=True), 
+ ) 
+ 
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception 
+ # subclass. 
+ if response.status_code >= 400: 
+ raise core_exceptions.from_http_response(response) 
+ 
+ # Return the response 
+ resp = operations_pb2.Operation() 
+ json_format.Parse(response.content, resp, ignore_unknown_fields=True) 
+ resp = self._interceptor.post_delete_cluster(resp) 
+ return resp 
+ 
+ class _DeleteInstance(AlloyDBAdminRestStub): 
+ def __hash__(self): 
+ return hash("DeleteInstance") 
+ 
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} 
+ 
+ @classmethod 
+ def _get_unset_required_fields(cls, message_dict): 
+ return { 
+ k: v 
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() 
+ if k not in message_dict 
+ } 
+ 
+ def __call__( 
+ self, 
+ request: service.DeleteInstanceRequest, 
+ *, 
+ retry: OptionalRetry = gapic_v1.method.DEFAULT, 
+ timeout: Optional[float] = None, 
+ metadata: Sequence[Tuple[str, str]] = (), 
+ ) -> operations_pb2.Operation: 
+ r"""Call the delete instance method over HTTP. 
+ 
+ Args: 
+ request (~.service.DeleteInstanceRequest): 
+ The request object. Message for deleting an Instance 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, 
+ should be retried. 
+ timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be 
+ sent along with the request as metadata. 
+ 
+ Returns: 
+ ~.operations_pb2.Operation: 
+ This resource represents a 
+ long-running operation that is the 
+ result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = service.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _FailoverInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("FailoverInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.FailoverInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the failover instance method over HTTP. + + Args: + request (~.service.FailoverInstanceRequest): + The request object. Message for triggering failover on an + Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/instances/*}:failover", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_failover_instance( + request, metadata + ) + pb_request = service.FailoverInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_failover_instance(resp) + return resp + + class _GetBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.service.GetBackupRequest): + The request object. Message for getting a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.resources.Backup: + Message describing Backup object + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = service.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Backup() + pb_resp = resources.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Call the get cluster method over HTTP. + + Args: + request (~.service.GetClusterRequest): + The request object. Message for getting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. 
+ 
+ """ 
+ 
+ http_options: List[Dict[str, str]] = [ 
+ { 
+ "method": "get", 
+ "uri": "/v1/{name=projects/*/locations/*/clusters/*}", 
+ }, 
+ ] 
+ request, metadata = self._interceptor.pre_get_cluster(request, metadata) 
+ pb_request = service.GetClusterRequest.pb(request) 
+ transcoded_request = path_template.transcode(http_options, pb_request) 
+ 
+ uri = transcoded_request["uri"] 
+ method = transcoded_request["method"] 
+ 
+ # Jsonify the query params 
+ query_params = json.loads( 
+ json_format.MessageToJson( 
+ transcoded_request["query_params"], 
+ including_default_value_fields=False, 
+ use_integers_for_enums=True, 
+ ) 
+ ) 
+ query_params.update(self._get_unset_required_fields(query_params)) 
+ 
+ query_params["$alt"] = "json;enum-encoding=int" 
+ 
+ # Send the request 
+ headers = dict(metadata) 
+ headers["Content-Type"] = "application/json" 
+ response = getattr(self._session, method)( 
+ "{host}{uri}".format(host=self._host, uri=uri), 
+ timeout=timeout, 
+ headers=headers, 
+ params=rest_helpers.flatten_query_params(query_params, strict=True), 
+ ) 
+ 
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception 
+ # subclass. 
+ if response.status_code >= 400: 
+ raise core_exceptions.from_http_response(response) 
+ 
+ # Return the response 
+ resp = resources.Cluster() 
+ pb_resp = resources.Cluster.pb(resp) 
+ 
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) 
+ resp = self._interceptor.post_get_cluster(resp) 
+ return resp 
+ 
+ class _GetInstance(AlloyDBAdminRestStub): 
+ def __hash__(self): 
+ return hash("GetInstance") 
+ 
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} 
+ 
+ @classmethod 
+ def _get_unset_required_fields(cls, message_dict): 
+ return { 
+ k: v 
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() 
+ if k not in message_dict 
+ } 
+ 
+ def __call__( 
+ self, 
+ request: service.GetInstanceRequest, 
+ *, 
+ retry: OptionalRetry = gapic_v1.method.DEFAULT, 
+ timeout: Optional[float] = None, 
+ metadata: Sequence[Tuple[str, str]] = (), 
+ ) -> resources.Instance: 
+ r"""Call the get instance method over HTTP. 
+ 
+ Args: 
+ request (~.service.GetInstanceRequest): 
+ The request object. Message for getting an Instance 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, 
+ should be retried. 
+ timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be 
+ sent along with the request as metadata. 
+ 
+ Returns: 
+ ~.resources.Instance: 
+ An Instance is a computing unit that 
+ an end customer can connect to. It's the 
+ main unit of computing resources in 
+ AlloyDB. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = service.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Instance() + pb_resp = resources.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ListBackups(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.service.ListBackupsRequest): + The request object. Message for requesting list of + Backups + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListBackupsResponse: + Message for response to listing + Backups + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = service.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListBackupsResponse() + pb_resp = service.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListClusters(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListClustersResponse: + r"""Call the list clusters method over HTTP. + + Args: + request (~.service.ListClustersRequest): + The request object. Message for requesting list of + Clusters + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListClustersResponse: + Message for response to listing + Clusters + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/clusters", + }, + ] + request, metadata = self._interceptor.pre_list_clusters(request, metadata) + pb_request = service.ListClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListClustersResponse() + pb_resp = service.ListClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_clusters(resp) + return resp + + class _ListInstances(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.service.ListInstancesRequest): + The request object. Message for requesting list of + Instances + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListInstancesResponse: + Message for response to listing + Instances + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/clusters/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = service.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListInstancesResponse() + pb_resp = service.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _ListSupportedDatabaseFlags(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListSupportedDatabaseFlags") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListSupportedDatabaseFlagsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListSupportedDatabaseFlagsResponse: + r"""Call the list supported database + flags method over HTTP. + + Args: + request (~.service.ListSupportedDatabaseFlagsRequest): + The request object. Message for listing the information + about the supported Database flags. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListSupportedDatabaseFlagsResponse: + Message for response to listing + SupportedDatabaseFlags. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/supportedDatabaseFlags", + }, + ] + request, metadata = self._interceptor.pre_list_supported_database_flags( + request, metadata + ) + pb_request = service.ListSupportedDatabaseFlagsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListSupportedDatabaseFlagsResponse() + pb_resp = service.ListSupportedDatabaseFlagsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_supported_database_flags(resp) + return resp + + class _RestartInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("RestartInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RestartInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restart instance method over HTTP. + + Args: + request (~.service.RestartInstanceRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/clusters/*/instances/*}:restart", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restart_instance( + request, metadata + ) + pb_request = service.RestartInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restart_instance(resp) + return resp + + class _RestoreCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("RestoreCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RestoreClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore cluster method over HTTP. + + Args: + request (~.service.RestoreClusterRequest): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/clusters:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_cluster(request, metadata) + pb_request = service.RestoreClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_cluster(resp) + return resp + + class _UpdateBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("UpdateBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update backup method over HTTP. + + Args: + request (~.service.UpdateBackupRequest): + The request object. Message for updating a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{backup.name=projects/*/locations/*/backups/*}", + "body": "backup", + }, + ] + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = service.UpdateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup(resp) + return resp + + class _UpdateCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("UpdateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update cluster method over HTTP. + + Args: + request (~.service.UpdateClusterRequest): + The request object. Message for updating a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ 
+ """ 
+ 
+ http_options: List[Dict[str, str]] = [ 
+ { 
+ "method": "patch", 
+ "uri": "/v1/{cluster.name=projects/*/locations/*/clusters/*}", 
+ "body": "cluster", 
+ }, 
+ ] 
+ request, metadata = self._interceptor.pre_update_cluster(request, metadata) 
+ pb_request = service.UpdateClusterRequest.pb(request) 
+ transcoded_request = path_template.transcode(http_options, pb_request) 
+ 
+ # Jsonify the request body 
+ 
+ body = json_format.MessageToJson( 
+ transcoded_request["body"], 
+ including_default_value_fields=False, 
+ use_integers_for_enums=True, 
+ ) 
+ uri = transcoded_request["uri"] 
+ method = transcoded_request["method"] 
+ 
+ # Jsonify the query params 
+ query_params = json.loads( 
+ json_format.MessageToJson( 
+ transcoded_request["query_params"], 
+ including_default_value_fields=False, 
+ use_integers_for_enums=True, 
+ ) 
+ ) 
+ query_params.update(self._get_unset_required_fields(query_params)) 
+ 
+ query_params["$alt"] = "json;enum-encoding=int" 
+ 
+ # Send the request 
+ headers = dict(metadata) 
+ headers["Content-Type"] = "application/json" 
+ response = getattr(self._session, method)( 
+ "{host}{uri}".format(host=self._host, uri=uri), 
+ timeout=timeout, 
+ headers=headers, 
+ params=rest_helpers.flatten_query_params(query_params, strict=True), 
+ data=body, 
+ ) 
+ 
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception 
+ # subclass. 
+ if response.status_code >= 400: 
+ raise core_exceptions.from_http_response(response) 
+ 
+ # Return the response 
+ resp = operations_pb2.Operation() 
+ json_format.Parse(response.content, resp, ignore_unknown_fields=True) 
+ resp = self._interceptor.post_update_cluster(resp) 
+ return resp 
+ 
+ class _UpdateInstance(AlloyDBAdminRestStub): 
+ def __hash__(self): 
+ return hash("UpdateInstance") 
+ 
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} 
+ 
+ @classmethod 
+ def _get_unset_required_fields(cls, message_dict): 
+ return { 
+ k: v 
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() 
+ if k not in message_dict 
+ } 
+ 
+ def __call__( 
+ self, 
+ request: service.UpdateInstanceRequest, 
+ *, 
+ retry: OptionalRetry = gapic_v1.method.DEFAULT, 
+ timeout: Optional[float] = None, 
+ metadata: Sequence[Tuple[str, str]] = (), 
+ ) -> operations_pb2.Operation: 
+ r"""Call the update instance method over HTTP. 
+ 
+ Args: 
+ request (~.service.UpdateInstanceRequest): 
+ The request object. Message for updating an Instance 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, 
+ should be retried. 
+ timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be 
+ sent along with the request as metadata. 
+ 
+ Returns: 
+ ~.operations_pb2.Operation: 
+ This resource represents a 
+ long-running operation that is the 
+ result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/clusters/*/instances/*}", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = service.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + @property + def batch_create_instances( + self, + ) -> Callable[[service.BatchCreateInstancesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
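+ # Each stub subclasses AlloyDBAdminRestStub and implements __call__, so the
+ # instance itself satisfies the declared Callable signature at runtime.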
+ # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def failover_instance( + self, + ) -> Callable[[service.FailoverInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup(self) -> Callable[[service.GetBackupRequest], resources.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cluster(self) -> Callable[[service.GetClusterRequest], resources.Cluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], resources.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], service.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_clusters( + self, + ) -> Callable[[service.ListClustersRequest], service.ListClustersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances( + self, + ) -> Callable[[service.ListInstancesRequest], service.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + service.ListSupportedDatabaseFlagsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSupportedDatabaseFlags(self._session, self._host, self._interceptor) # type: ignore + + @property + def restart_instance( + self, + ) -> Callable[[service.RestartInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestartInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(AlloyDBAdminRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(AlloyDBAdminRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
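+            # For example, an HTTP 404 becomes core_exceptions.NotFound and an
+            # HTTP 403 becomes core_exceptions.PermissionDenied; the mapping is
+            # performed by google.api_core.exceptions.from_http_response.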
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
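+            # Note that the ``retry`` argument accepted above is not consulted
+            # in this stub's body; a failing call surfaces immediately as an
+            # exception.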
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AlloyDBAdminRestTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/__init__.py new file mode 100644 index 000000000000..e13f2628252b --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/__init__.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
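+#
+# A minimal usage sketch (assuming the top-level ``alloydb_v1`` package
+# re-exports these types, as GAPIC-generated packages typically do; the
+# network path is a placeholder):
+#
+#   from google.cloud import alloydb_v1
+#
+#   cluster = alloydb_v1.Cluster(
+#       network="projects/123/global/networks/default",
+#   )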
+# +from .resources import ( + AutomatedBackupPolicy, + Backup, + BackupSource, + Cluster, + DatabaseVersion, + EncryptionConfig, + EncryptionInfo, + Instance, + InstanceView, + MigrationSource, + SslConfig, + SupportedDatabaseFlag, + UserPassword, +) +from .service import ( + BatchCreateInstancesMetadata, + BatchCreateInstancesRequest, + BatchCreateInstancesResponse, + BatchCreateInstanceStatus, + CreateBackupRequest, + CreateClusterRequest, + CreateInstanceRequest, + CreateInstanceRequests, + DeleteBackupRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + FailoverInstanceRequest, + GetBackupRequest, + GetClusterRequest, + GetInstanceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListClustersRequest, + ListClustersResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSupportedDatabaseFlagsRequest, + ListSupportedDatabaseFlagsResponse, + OperationMetadata, + RestartInstanceRequest, + RestoreClusterRequest, + UpdateBackupRequest, + UpdateClusterRequest, + UpdateInstanceRequest, +) + +__all__ = ( + "AutomatedBackupPolicy", + "Backup", + "BackupSource", + "Cluster", + "EncryptionConfig", + "EncryptionInfo", + "Instance", + "MigrationSource", + "SslConfig", + "SupportedDatabaseFlag", + "UserPassword", + "DatabaseVersion", + "InstanceView", + "BatchCreateInstancesMetadata", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "BatchCreateInstanceStatus", + "CreateBackupRequest", + "CreateClusterRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "DeleteBackupRequest", + "DeleteClusterRequest", + "DeleteInstanceRequest", + "FailoverInstanceRequest", + "GetBackupRequest", + "GetClusterRequest", + "GetInstanceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "OperationMetadata", + "RestartInstanceRequest", + "RestoreClusterRequest", + "UpdateBackupRequest", + "UpdateClusterRequest", + "UpdateInstanceRequest", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py new file mode 100644 index 000000000000..b90a643a651f --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py @@ -0,0 +1,1580 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
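+#
+# The message and enum classes in this module are proto-plus wrappers. As a
+# quick sketch (names defined below; values illustrative), they can be built
+# with keyword arguments and round-tripped through JSON:
+#
+#   policy = AutomatedBackupPolicy(enabled=True)
+#   payload = AutomatedBackupPolicy.to_json(policy)
+#   restored = AutomatedBackupPolicy.from_json(payload)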
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.protobuf import wrappers_pb2  # type: ignore
+from google.type import dayofweek_pb2  # type: ignore
+from google.type import timeofday_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.alloydb.v1",
+    manifest={
+        "DatabaseVersion",
+        "InstanceView",
+        "UserPassword",
+        "MigrationSource",
+        "EncryptionConfig",
+        "EncryptionInfo",
+        "SslConfig",
+        "AutomatedBackupPolicy",
+        "BackupSource",
+        "Cluster",
+        "Instance",
+        "Backup",
+        "SupportedDatabaseFlag",
+    },
+)
+
+
+class DatabaseVersion(proto.Enum):
+    r"""The supported database engine versions.
+
+    Values:
+        DATABASE_VERSION_UNSPECIFIED (0):
+            This is an unknown database version.
+        POSTGRES_13 (1):
+            DEPRECATED - The database version is Postgres
+            13.
+        POSTGRES_14 (2):
+            The database version is Postgres 14.
+    """
+    DATABASE_VERSION_UNSPECIFIED = 0
+    POSTGRES_13 = 1
+    POSTGRES_14 = 2
+
+
+class InstanceView(proto.Enum):
+    r"""View on Instance. Pass this enum to RPCs that return an
+    Instance message to control which subsets of fields to get.
+
+    Values:
+        INSTANCE_VIEW_UNSPECIFIED (0):
+            INSTANCE_VIEW_UNSPECIFIED Not specified, equivalent to
+            BASIC.
+        INSTANCE_VIEW_BASIC (1):
+            BASIC server responses for a primary or read
+            instance include all the relevant instance
+            details, excluding the details of each node in
+            the instance. The default value.
+        INSTANCE_VIEW_FULL (2):
+            FULL response is equivalent to BASIC for
+            primary instance (for now). For read pool
+            instance, this includes details of each node in
+            the pool.
+    """
+    INSTANCE_VIEW_UNSPECIFIED = 0
+    INSTANCE_VIEW_BASIC = 1
+    INSTANCE_VIEW_FULL = 2
+
+
+class UserPassword(proto.Message):
+    r"""The username/password for a database user. Used for
+    specifying initial users at cluster creation time.
+
+    Attributes:
+        user (str):
+            The database username.
+        password (str):
+            The initial password for the user.
+    """
+
+    user: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    password: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class MigrationSource(proto.Message):
+    r"""Subset of the source instance configuration that is available
+    when reading the cluster resource.
+
+    Attributes:
+        host_port (str):
+            Output only. The host and port of the
+            on-premises instance in host:port format.
+        reference_id (str):
+            Output only. Placeholder for the external
+            source identifier (e.g., a DMS job name) that
+            created the cluster.
+        source_type (google.cloud.alloydb_v1.types.MigrationSource.MigrationSourceType):
+            Output only. Type of migration source.
+    """
+
+    class MigrationSourceType(proto.Enum):
+        r"""Denotes the type of migration source that created this
+        cluster.
+
+        Values:
+            MIGRATION_SOURCE_TYPE_UNSPECIFIED (0):
+                Migration source is unknown.
+            DMS (1):
+                DMS source means the cluster was created via
+                a DMS migration job.
+        """
+        MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0
+        DMS = 1
+
+    host_port: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    reference_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    source_type: MigrationSourceType = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=MigrationSourceType,
+    )
+
+
+class EncryptionConfig(proto.Message):
+    r"""EncryptionConfig describes the encryption config of a cluster
+    or a backup that is encrypted with a CMEK (customer-managed
+    encryption key).
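+
+    A construction sketch (the resource names below are placeholders)::
+
+        config = EncryptionConfig(
+            kms_key_name="projects/p/locations/l/keyRings/r/cryptoKeys/k",
+        )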
+ + Attributes: + kms_key_name (str): + The fully-qualified resource name of the KMS key. Each Cloud + KMS key is regionalized and has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EncryptionInfo(proto.Message): + r"""EncryptionInfo describes the encryption information of a + cluster or a backup. + + Attributes: + encryption_type (google.cloud.alloydb_v1.types.EncryptionInfo.Type): + Output only. Type of encryption. + kms_key_versions (MutableSequence[str]): + Output only. Cloud KMS key versions that are + being used to protect the database or the + backup. + """ + + class Type(proto.Enum): + r"""Possible encryption types. + + Values: + TYPE_UNSPECIFIED (0): + Encryption type not specified. Defaults to + GOOGLE_DEFAULT_ENCRYPTION. + GOOGLE_DEFAULT_ENCRYPTION (1): + The data is encrypted at rest with a key that + is fully managed by Google. No key version will + be populated. This is the default state. + CUSTOMER_MANAGED_ENCRYPTION (2): + The data is encrypted at rest with a key that + is managed by the customer. KMS key versions + will be populated. + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_DEFAULT_ENCRYPTION = 1 + CUSTOMER_MANAGED_ENCRYPTION = 2 + + encryption_type: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + kms_key_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class SslConfig(proto.Message): + r"""SSL configuration for an AlloyDB Cluster. + + Attributes: + ssl_mode (google.cloud.alloydb_v1.types.SslConfig.SslMode): + Optional. SSL mode. Specifies client-server + SSL/TLS connection behavior. + ca_source (google.cloud.alloydb_v1.types.SslConfig.CaSource): + Optional. Certificate Authority (CA) source. Only + CA_SOURCE_MANAGED is supported currently, and is the default + value. + """ + + class SslMode(proto.Enum): + r"""SSL mode options. + + Values: + SSL_MODE_UNSPECIFIED (0): + SSL mode not specified. Defaults to SSL_MODE_ALLOW. + SSL_MODE_ALLOW (1): + SSL connections are optional. CA verification + not enforced. + SSL_MODE_REQUIRE (2): + SSL connections are required. CA verification + not enforced. Clients may use locally + self-signed certificates (default psql client + behavior). + SSL_MODE_VERIFY_CA (3): + SSL connections are required. CA verification + enforced. Clients must have certificates signed + by a Cluster CA, e.g. via + GenerateClientCertificate. + """ + SSL_MODE_UNSPECIFIED = 0 + SSL_MODE_ALLOW = 1 + SSL_MODE_REQUIRE = 2 + SSL_MODE_VERIFY_CA = 3 + + class CaSource(proto.Enum): + r"""Certificate Authority (CA) source for SSL/TLS certificates. + + Values: + CA_SOURCE_UNSPECIFIED (0): + Certificate Authority (CA) source not specified. Defaults to + CA_SOURCE_MANAGED. + CA_SOURCE_MANAGED (1): + Certificate Authority (CA) managed by the + AlloyDB Cluster. + """ + CA_SOURCE_UNSPECIFIED = 0 + CA_SOURCE_MANAGED = 1 + + ssl_mode: SslMode = proto.Field( + proto.ENUM, + number=1, + enum=SslMode, + ) + ca_source: CaSource = proto.Field( + proto.ENUM, + number=2, + enum=CaSource, + ) + + +class AutomatedBackupPolicy(proto.Message): + r"""Message describing the user-specified automated backup + policy. + All fields in the automated backup policy are optional. Defaults + for each field are provided if they are not set. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        weekly_schedule (google.cloud.alloydb_v1.types.AutomatedBackupPolicy.WeeklySchedule):
+            Weekly schedule for the Backup.
+
+            This field is a member of `oneof`_ ``schedule``.
+        time_based_retention (google.cloud.alloydb_v1.types.AutomatedBackupPolicy.TimeBasedRetention):
+            Time-based Backup retention policy.
+
+            This field is a member of `oneof`_ ``retention``.
+        quantity_based_retention (google.cloud.alloydb_v1.types.AutomatedBackupPolicy.QuantityBasedRetention):
+            Quantity-based Backup retention policy to
+            retain recent backups.
+
+            This field is a member of `oneof`_ ``retention``.
+        enabled (bool):
+            Whether automated backups are enabled. If not
+            set, defaults to true.
+
+            This field is a member of `oneof`_ ``_enabled``.
+        backup_window (google.protobuf.duration_pb2.Duration):
+            The length of the time window during which a
+            backup can be taken. If a backup does not
+            succeed within this time window, it will be
+            canceled and considered failed.
+
+            The backup window must be at least 5 minutes
+            long. There is no upper bound on the window. If
+            not set, it defaults to 1 hour.
+        encryption_config (google.cloud.alloydb_v1.types.EncryptionConfig):
+            Optional. The encryption config can be
+            specified to encrypt the backups with a
+            customer-managed encryption key (CMEK). When
+            this field is not specified, the backup will
+            then use the default encryption scheme to
+            protect the user data.
+        location (str):
+            The location where the backup will be stored.
+            Currently, the only supported option is to store
+            the backup in the same region as the cluster.
+            If empty, defaults to the region of the cluster.
+        labels (MutableMapping[str, str]):
+            Labels to apply to backups created using this
+            configuration.
+    """
+
+    class WeeklySchedule(proto.Message):
+        r"""A weekly schedule starts a backup at prescribed start times within a
+        day, for the specified days of the week.
+
+        The weekly schedule message is flexible and can be used to create
+        many types of schedules. For example, to have a daily backup that
+        starts at 22:00, configure the ``start_times`` field to have one
+        element "22:00" and the ``days_of_week`` field to have all seven
+        days of the week.
+
+        Attributes:
+            start_times (MutableSequence[google.type.timeofday_pb2.TimeOfDay]):
+                The times during the day to start a backup.
+                The start times are assumed to be in UTC and to
+                be an exact hour (e.g., 04:00:00).
+                If no start times are provided, a single fixed
+                start time is chosen arbitrarily.
+            days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]):
+                The days of the week to perform a backup.
+                If this field is left empty, the default of
+                every day of the week is used.
+        """
+
+        start_times: MutableSequence[timeofday_pb2.TimeOfDay] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message=timeofday_pb2.TimeOfDay,
+        )
+        days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField(
+            proto.ENUM,
+            number=2,
+            enum=dayofweek_pb2.DayOfWeek,
+        )
+
+    class TimeBasedRetention(proto.Message):
+        r"""A time based retention policy specifies that all backups
+        within a certain time period should be retained.
+
+        Attributes:
+            retention_period (google.protobuf.duration_pb2.Duration):
+                The retention period.
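+
+        For example, a 14-day retention policy (a sketch; the value is
+        illustrative only)::
+
+            from google.protobuf import duration_pb2
+
+            retention = AutomatedBackupPolicy.TimeBasedRetention(
+                retention_period=duration_pb2.Duration(seconds=14 * 24 * 3600),
+            )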
+ """ + + retention_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + class QuantityBasedRetention(proto.Message): + r"""A quantity based policy specifies that a certain number of + the most recent successful backups should be retained. + + Attributes: + count (int): + The number of backups to retain. + """ + + count: int = proto.Field( + proto.INT32, + number=1, + ) + + weekly_schedule: WeeklySchedule = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message=WeeklySchedule, + ) + time_based_retention: TimeBasedRetention = proto.Field( + proto.MESSAGE, + number=4, + oneof="retention", + message=TimeBasedRetention, + ) + quantity_based_retention: QuantityBasedRetention = proto.Field( + proto.MESSAGE, + number=5, + oneof="retention", + message=QuantityBasedRetention, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + backup_window: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="EncryptionConfig", + ) + location: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + + +class BackupSource(proto.Message): + r"""Message describing a BackupSource. + + Attributes: + backup_uid (str): + Output only. The system-generated UID of the + backup which was used to create this resource. + The UID is generated when the backup is created, + and it is retained until the backup is deleted. + backup_name (str): + Required. The name of the backup resource with the format: + + - projects/{project}/locations/{region}/backups/{backup_id} + """ + + backup_uid: str = proto.Field( + proto.STRING, + number=2, + ) + backup_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Cluster(proto.Message): + r"""A cluster is a collection of regional AlloyDB resources. It + can include a primary instance and one or more read pool + instances. All cluster resources share a storage layer, which + scales as needed. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_source (google.cloud.alloydb_v1.types.BackupSource): + Output only. Cluster created from backup. + + This field is a member of `oneof`_ ``source``. + migration_source (google.cloud.alloydb_v1.types.MigrationSource): + Output only. Cluster created via DMS + migration. + + This field is a member of `oneof`_ ``source``. + name (str): + Output only. The name of the cluster resource with the + format: + + - projects/{project}/locations/{region}/clusters/{cluster_id} + where the cluster ID segment should satisfy the regex + expression ``[a-z0-9-]+``. For more details see + https://google.aip.dev/122. The prefix of the cluster + resource name is the name of the parent resource: + - projects/{project}/locations/{region} + display_name (str): + User-settable and human-readable display name + for the Cluster. + uid (str): + Output only. The system-generated UID of the + resource. The UID is assigned when the resource + is created, and it is retained until it is + deleted. 
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Create time stamp
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Update time stamp
+        delete_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Delete time stamp
+        labels (MutableMapping[str, str]):
+            Labels as key value pairs
+        state (google.cloud.alloydb_v1.types.Cluster.State):
+            Output only. The current serving state of the
+            cluster.
+        cluster_type (google.cloud.alloydb_v1.types.Cluster.ClusterType):
+            Output only. The type of the cluster. This is an output-only
+            field and it's populated at the Cluster creation time or the
+            Cluster promotion time. The cluster type is determined by
+            which RPC was used to create the cluster (i.e.
+            ``CreateCluster`` vs. ``CreateSecondaryCluster``).
+        database_version (google.cloud.alloydb_v1.types.DatabaseVersion):
+            Output only. The database engine major
+            version. This is an output-only field and it's
+            populated at the Cluster creation time. This
+            field cannot be changed after cluster creation.
+        network (str):
+            Required. The resource link for the VPC network in which
+            cluster resources are created and from which they are
+            accessible via Private IP. The network must belong to the
+            same project as the cluster. It is specified in the form:
+            "projects/{project_number}/global/networks/{network_id}".
+            This is required to create a cluster. It can be updated, but
+            it cannot be removed.
+        etag (str):
+            For Resource freshness validation
+            (https://google.aip.dev/154)
+        annotations (MutableMapping[str, str]):
+            Annotations to allow client tools to store a
+            small amount of arbitrary data. This is distinct
+            from labels. https://google.aip.dev/128
+        reconciling (bool):
+            Output only. Reconciling
+            (https://google.aip.dev/128#reconciliation). Set
+            to true if the current state of Cluster does not
+            match the user's intended state, and the service
+            is actively updating the resource to reconcile
+            them. This can happen due to user-triggered
+            updates or system actions like failover or
+            maintenance.
+        initial_user (google.cloud.alloydb_v1.types.UserPassword):
+            Input only. Initial user to set up during cluster creation.
+            Required. If used in ``RestoreCluster`` this is ignored.
+        automated_backup_policy (google.cloud.alloydb_v1.types.AutomatedBackupPolicy):
+            The automated backup policy for this cluster.
+            If no policy is provided then the default policy
+            will be used. If backups are supported for the
+            cluster, the default policy takes one backup a
+            day, has a backup window of 1 hour, and retains
+            backups for 14 days. For more information on the
+            defaults, consult the documentation for the
+            message type.
+        ssl_config (google.cloud.alloydb_v1.types.SslConfig):
+            SSL configuration for this AlloyDB Cluster.
+        encryption_config (google.cloud.alloydb_v1.types.EncryptionConfig):
+            Optional. The encryption config can be
+            specified to encrypt the data disks and other
+            persistent data resources of a cluster with a
+            customer-managed encryption key (CMEK). When
+            this field is not specified, the cluster will
+            then use the default encryption scheme to
+            protect the user data.
+        encryption_info (google.cloud.alloydb_v1.types.EncryptionInfo):
+            Output only. The encryption information for
+            the cluster.
+        secondary_config (google.cloud.alloydb_v1.types.Cluster.SecondaryConfig):
+            Cross Region replication config specific to
+            SECONDARY cluster.
+        primary_config (google.cloud.alloydb_v1.types.Cluster.PrimaryConfig):
+            Output only.
Cross Region replication config
+            specific to PRIMARY cluster.
+    """
+
+    class State(proto.Enum):
+        r"""Cluster State
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                The state of the cluster is unknown.
+            READY (1):
+                The cluster is active and running.
+            STOPPED (2):
+                The cluster is stopped. All instances in the
+                cluster are stopped. Customers can start a
+                stopped cluster at any point and all their
+                instances will come back to life with the same
+                names and IP resources. In this state, the
+                customer pays for storage.
+                Associated backups could also be present in a
+                stopped cluster.
+            EMPTY (3):
+                The cluster is empty and has no associated
+                resources. All instances, associated storage and
+                backups have been deleted.
+            CREATING (4):
+                The cluster is being created.
+            DELETING (5):
+                The cluster is being deleted.
+            FAILED (6):
+                The creation of the cluster failed.
+            BOOTSTRAPPING (7):
+                The cluster is bootstrapping with data from
+                some other source. Direct mutations to the
+                cluster (e.g. adding a read pool) are not
+                allowed.
+            MAINTENANCE (8):
+                The cluster is under maintenance. AlloyDB
+                regularly performs maintenance and upgrades on
+                customer clusters. Updates on the cluster are
+                not allowed while the cluster is in this state.
+            PROMOTING (9):
+                The cluster is being promoted.
+        """
+        STATE_UNSPECIFIED = 0
+        READY = 1
+        STOPPED = 2
+        EMPTY = 3
+        CREATING = 4
+        DELETING = 5
+        FAILED = 6
+        BOOTSTRAPPING = 7
+        MAINTENANCE = 8
+        PROMOTING = 9
+
+    class ClusterType(proto.Enum):
+        r"""Type of Cluster
+
+        Values:
+            CLUSTER_TYPE_UNSPECIFIED (0):
+                The type of the cluster is unknown.
+            PRIMARY (1):
+                Primary cluster that supports read and write
+                operations.
+            SECONDARY (2):
+                Secondary cluster that is replicating from
+                another region. This only supports read
+                operations.
+        """
+        CLUSTER_TYPE_UNSPECIFIED = 0
+        PRIMARY = 1
+        SECONDARY = 2
+
+    class SecondaryConfig(proto.Message):
+        r"""Configuration information for the secondary cluster. This
+        should be set if and only if the cluster is of type SECONDARY.
+
+        Attributes:
+            primary_cluster_name (str):
+                The name of the primary cluster, with the format:
+
+                - projects/{project}/locations/{region}/clusters/{cluster_id}
+        """
+
+        primary_cluster_name: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+
+    class PrimaryConfig(proto.Message):
+        r"""Configuration for the primary cluster. It has the list of
+        clusters that are replicating from this cluster. This should be
+        set if and only if the cluster is of type PRIMARY.
+
+        Attributes:
+            secondary_cluster_names (MutableSequence[str]):
+                Output only. Names of the clusters that are
+                replicating from this cluster.
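+
+        For example (a sketch; ``cluster`` is assumed to be a previously
+        fetched ``Cluster`` message)::
+
+            if cluster.cluster_type == Cluster.ClusterType.PRIMARY:
+                for name in cluster.primary_config.secondary_cluster_names:
+                    print(name)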
+ """ + + secondary_cluster_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + backup_source: "BackupSource" = proto.Field( + proto.MESSAGE, + number=15, + oneof="source", + message="BackupSource", + ) + migration_source: "MigrationSource" = proto.Field( + proto.MESSAGE, + number=16, + oneof="source", + message="MigrationSource", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + cluster_type: ClusterType = proto.Field( + proto.ENUM, + number=24, + enum=ClusterType, + ) + database_version: "DatabaseVersion" = proto.Field( + proto.ENUM, + number=9, + enum="DatabaseVersion", + ) + network: str = proto.Field( + proto.STRING, + number=10, + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=12, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=13, + ) + initial_user: "UserPassword" = proto.Field( + proto.MESSAGE, + number=14, + message="UserPassword", + ) + automated_backup_policy: "AutomatedBackupPolicy" = proto.Field( + proto.MESSAGE, + number=17, + message="AutomatedBackupPolicy", + ) + ssl_config: "SslConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="SslConfig", + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=19, + message="EncryptionConfig", + ) + encryption_info: "EncryptionInfo" = proto.Field( + proto.MESSAGE, + number=20, + message="EncryptionInfo", + ) + secondary_config: SecondaryConfig = proto.Field( + proto.MESSAGE, + number=22, + message=SecondaryConfig, + ) + primary_config: PrimaryConfig = proto.Field( + proto.MESSAGE, + number=23, + message=PrimaryConfig, + ) + + +class Instance(proto.Message): + r"""An Instance is a computing unit that an end customer can + connect to. It's the main unit of computing resources in + AlloyDB. + + Attributes: + name (str): + Output only. The name of the instance resource with the + format: + + - projects/{project}/locations/{region}/clusters/{cluster_id}/instances/{instance_id} + where the cluster and instance ID segments should satisfy + the regex expression ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``, + e.g. 1-63 characters of lowercase letters, numbers, and + dashes, starting with a letter, and ending with a letter + or number. For more details see + https://google.aip.dev/122. The prefix of the instance + resource name is the name of the parent resource: + - projects/{project}/locations/{region}/clusters/{cluster_id} + display_name (str): + User-settable and human-readable display name + for the Instance. + uid (str): + Output only. The system-generated UID of the + resource. The UID is assigned when the resource + is created, and it is retained until it is + deleted. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Delete time stamp + labels (MutableMapping[str, str]): + Labels as key value pairs + state (google.cloud.alloydb_v1.types.Instance.State): + Output only. The current serving state of the + instance. + instance_type (google.cloud.alloydb_v1.types.Instance.InstanceType): + Required. The type of the instance. Specified + at creation time. + machine_config (google.cloud.alloydb_v1.types.Instance.MachineConfig): + Configurations for the machines that host the + underlying database engine. + availability_type (google.cloud.alloydb_v1.types.Instance.AvailabilityType): + Availability type of an Instance. + Defaults to REGIONAL for both primary and read + instances. Note that primary and read instances + can have different availability types. + gce_zone (str): + The Compute Engine zone that the instance + should serve from, per + https://cloud.google.com/compute/docs/regions-zones + This can ONLY be specified for ZONAL instances. + If present for a REGIONAL instance, an error + will be thrown. If this is absent for a ZONAL + instance, instance is created in a random zone + with available capacity. + database_flags (MutableMapping[str, str]): + Database flags. Set at instance level. + + - They are copied from primary instance on read instance + creation. + - Read instances can set new or override existing flags + that are relevant for reads, e.g. for enabling columnar + cache on a read instance. Flags set on read instance may + or may not be present on primary. + + This is a list of "key": "value" pairs. "key": The name of + the flag. These flags are passed at instance setup time, so + include both server options and system variables for + Postgres. Flags are specified with underscores, not hyphens. + "value": The value of the flag. Booleans are set to **on** + for true and **off** for false. This field must be omitted + if the flag doesn't take a value. + writable_node (google.cloud.alloydb_v1.types.Instance.Node): + Output only. This is set for the read-write + VM of the PRIMARY instance only. + nodes (MutableSequence[google.cloud.alloydb_v1.types.Instance.Node]): + Output only. List of available read-only VMs + in this instance, including the standby for a + PRIMARY instance. + query_insights_config (google.cloud.alloydb_v1.types.Instance.QueryInsightsInstanceConfig): + Configuration for query insights. + read_pool_config (google.cloud.alloydb_v1.types.Instance.ReadPoolConfig): + Read pool specific config. + ip_address (str): + Output only. The IP address for the Instance. + This is the connection endpoint for an end-user + application. + reconciling (bool): + Output only. Reconciling + (https://google.aip.dev/128#reconciliation). Set + to true if the current state of Instance does + not match the user's intended state, and the + service is actively updating the resource to + reconcile them. This can happen due to + user-triggered updates or system actions like + failover or maintenance. + etag (str): + For Resource freshness validation + (https://google.aip.dev/154) + annotations (MutableMapping[str, str]): + Annotations to allow client tools to store + small amount of arbitrary data. This is distinct + from labels. https://google.aip.dev/128 + """ + + class State(proto.Enum): + r"""Instance State + + Values: + STATE_UNSPECIFIED (0): + The state of the instance is unknown. 
+            READY (1):
+                The instance is active and running.
+            STOPPED (2):
+                The instance is stopped. Instance name and IP
+                resources are preserved.
+            CREATING (3):
+                The instance is being created.
+            DELETING (4):
+                The instance is being deleted.
+            MAINTENANCE (5):
+                The instance is down for maintenance.
+            FAILED (6):
+                The creation of the instance failed or a
+                fatal error occurred during an operation on the
+                instance. Note: instances in this state will be
+                auto-repaired when possible, and customers can
+                still restart, update, or delete these
+                instances.
+            BOOTSTRAPPING (8):
+                Index 7 is used in the producer APIs for the ROLLED_BACK
+                state. That index is kept unused here in case the state also
+                needs to be exposed via consumer APIs in the future. The
+                instance has been configured to sync data from some other
+                source.
+            PROMOTING (9):
+                The instance is being promoted.
+        """
+        STATE_UNSPECIFIED = 0
+        READY = 1
+        STOPPED = 2
+        CREATING = 3
+        DELETING = 4
+        MAINTENANCE = 5
+        FAILED = 6
+        BOOTSTRAPPING = 8
+        PROMOTING = 9
+
+    class InstanceType(proto.Enum):
+        r"""Type of an Instance
+
+        Values:
+            INSTANCE_TYPE_UNSPECIFIED (0):
+                The type of the instance is unknown.
+            PRIMARY (1):
+                PRIMARY instances support read and write
+                operations.
+            READ_POOL (2):
+                READ POOL instances support read operations only. Each read
+                pool instance consists of one or more homogeneous nodes.
+
+                - A read pool of size 1 can only have zonal availability.
+                - Read pools with node count of 2 or more can have regional
+                  availability (nodes are present in 2 or more zones in a
+                  region).
+            SECONDARY (3):
+                SECONDARY instances support read operations
+                only. A SECONDARY instance is a cross-region
+                read replica.
+        """
+        INSTANCE_TYPE_UNSPECIFIED = 0
+        PRIMARY = 1
+        READ_POOL = 2
+        SECONDARY = 3
+
+    class AvailabilityType(proto.Enum):
+        r"""The Availability type of an instance. Potential values:
+        - ZONAL: The instance serves data from only one zone. Outages in
+        that zone affect instance availability.
+        - REGIONAL: The instance can serve data from more than one zone
+        in a region (it is highly available).
+
+        Values:
+            AVAILABILITY_TYPE_UNSPECIFIED (0):
+                This is an unknown Availability type.
+            ZONAL (1):
+                Zonal available instance.
+            REGIONAL (2):
+                Regional (highly available) instance.
+        """
+        AVAILABILITY_TYPE_UNSPECIFIED = 0
+        ZONAL = 1
+        REGIONAL = 2
+
+    class MachineConfig(proto.Message):
+        r"""MachineConfig describes the configuration of a machine.
+
+        Attributes:
+            cpu_count (int):
+                The number of CPUs in the VM instance.
+        """
+
+        cpu_count: int = proto.Field(
+            proto.INT32,
+            number=1,
+        )
+
+    class Node(proto.Message):
+        r"""Details of a single node in the instance.
+        Nodes in an AlloyDB instance are ephemeral; they can change
+        during update, failover, autohealing and resize operations.
+
+        Attributes:
+            zone_id (str):
+                The Compute Engine zone of the VM e.g.
+                "us-central1-b".
+            id (str):
+                The identifier of the VM e.g.
+                "test-read-0601-407e52be-ms3l".
+            ip (str):
+                The private IP address of the VM e.g.
+                "10.57.0.34".
+            state (str):
+                Determined by the state of the compute VM and
+                postgres-service health. Compute VM state can
+                have values listed in
+                https://cloud.google.com/compute/docs/instances/instance-life-cycle
+                and postgres-service health can have values:
+                HEALTHY and UNHEALTHY.
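+
+        For example, to collect unhealthy node IDs (a sketch; ``instance``
+        is assumed to be a previously fetched ``Instance`` message)::
+
+            bad = [n.id for n in instance.nodes if n.state == "UNHEALTHY"]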
+ """ + + zone_id: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + ip: str = proto.Field( + proto.STRING, + number=3, + ) + state: str = proto.Field( + proto.STRING, + number=4, + ) + + class QueryInsightsInstanceConfig(proto.Message): + r"""QueryInsights Instance specific configuration. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + record_application_tags (bool): + Record application tags for an instance. + This flag is turned "on" by default. + + This field is a member of `oneof`_ ``_record_application_tags``. + record_client_address (bool): + Record client address for an instance. Client + address is PII information. This flag is turned + "on" by default. + + This field is a member of `oneof`_ ``_record_client_address``. + query_string_length (int): + Query string length. The default value is + 1024. Any integer between 256 and 4500 is + considered valid. + query_plans_per_minute (int): + Number of query execution plans captured by + Insights per minute for all queries combined. + The default value is 5. Any integer between 0 + and 20 is considered valid. + + This field is a member of `oneof`_ ``_query_plans_per_minute``. + """ + + record_application_tags: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + record_client_address: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + query_string_length: int = proto.Field( + proto.UINT32, + number=4, + ) + query_plans_per_minute: int = proto.Field( + proto.UINT32, + number=5, + optional=True, + ) + + class ReadPoolConfig(proto.Message): + r"""Configuration for a read pool instance. + + Attributes: + node_count (int): + Read capacity, i.e. number of nodes in a read + pool instance. 
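+
+        For example, a two-node read pool (a sketch; values are
+        illustrative only)::
+
+            instance = Instance(
+                instance_type=Instance.InstanceType.READ_POOL,
+                read_pool_config=Instance.ReadPoolConfig(node_count=2),
+            )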
+ """ + + node_count: int = proto.Field( + proto.INT32, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + instance_type: InstanceType = proto.Field( + proto.ENUM, + number=9, + enum=InstanceType, + ) + machine_config: MachineConfig = proto.Field( + proto.MESSAGE, + number=10, + message=MachineConfig, + ) + availability_type: AvailabilityType = proto.Field( + proto.ENUM, + number=11, + enum=AvailabilityType, + ) + gce_zone: str = proto.Field( + proto.STRING, + number=12, + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + writable_node: Node = proto.Field( + proto.MESSAGE, + number=19, + message=Node, + ) + nodes: MutableSequence[Node] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=Node, + ) + query_insights_config: QueryInsightsInstanceConfig = proto.Field( + proto.MESSAGE, + number=21, + message=QueryInsightsInstanceConfig, + ) + read_pool_config: ReadPoolConfig = proto.Field( + proto.MESSAGE, + number=14, + message=ReadPoolConfig, + ) + ip_address: str = proto.Field( + proto.STRING, + number=15, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=16, + ) + etag: str = proto.Field( + proto.STRING, + number=17, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + + +class Backup(proto.Message): + r"""Message describing Backup object + + Attributes: + name (str): + Output only. The name of the backup resource with the + format: + + - projects/{project}/locations/{region}/backups/{backup_id} + where the cluster and backup ID segments should satisfy + the regex expression ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``, + e.g. 1-63 characters of lowercase letters, numbers, and + dashes, starting with a letter, and ending with a letter + or number. For more details see + https://google.aip.dev/122. The prefix of the backup + resource name is the name of the parent resource: + - projects/{project}/locations/{region} + display_name (str): + User-settable and human-readable display name + for the Backup. + uid (str): + Output only. The system-generated UID of the + resource. The UID is assigned when the resource + is created, and it is retained until it is + deleted. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Delete time stamp + labels (MutableMapping[str, str]): + Labels as key value pairs + state (google.cloud.alloydb_v1.types.Backup.State): + Output only. The current state of the backup. + type_ (google.cloud.alloydb_v1.types.Backup.Type): + The backup type, which suggests the trigger + for the backup. + description (str): + User-provided description of the backup. 
+        cluster_uid (str):
+            Output only. The system-generated UID of the
+            cluster which was used to create this resource.
+        cluster_name (str):
+            Required. The full resource name of the backup source
+            cluster (e.g., projects//locations//clusters/).
+        reconciling (bool):
+            Output only. Reconciling
+            (https://google.aip.dev/128#reconciliation), if
+            true, indicates that the service is actively
+            updating the resource. This can happen due to
+            user-triggered updates or system actions like
+            failover or maintenance.
+        encryption_config (google.cloud.alloydb_v1.types.EncryptionConfig):
+            Optional. The encryption config can be
+            specified to encrypt the backup with a
+            customer-managed encryption key (CMEK). When
+            this field is not specified, the backup will
+            then use the default encryption scheme to
+            protect the user data.
+        encryption_info (google.cloud.alloydb_v1.types.EncryptionInfo):
+            Output only. The encryption information for
+            the backup.
+        etag (str):
+            For Resource freshness validation
+            (https://google.aip.dev/154)
+        annotations (MutableMapping[str, str]):
+            Annotations to allow client tools to store a
+            small amount of arbitrary data. This is distinct
+            from labels. https://google.aip.dev/128
+        size_bytes (int):
+            Output only. The size of the backup in bytes.
+        expiry_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time after which the backup is eligible to
+            be garbage collected. It is the duration specified by the
+            backup's retention policy, added to the backup's
+            create_time.
+    """
+
+    class State(proto.Enum):
+        r"""Backup State
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                The state of the backup is unknown.
+            READY (1):
+                The backup is ready.
+            CREATING (2):
+                The backup is being created.
+            FAILED (3):
+                The backup failed.
+            DELETING (4):
+                The backup is being deleted.
+        """
+        STATE_UNSPECIFIED = 0
+        READY = 1
+        CREATING = 2
+        FAILED = 3
+        DELETING = 4
+
+    class Type(proto.Enum):
+        r"""Backup Type
+
+        Values:
+            TYPE_UNSPECIFIED (0):
+                Backup Type is unknown.
+            ON_DEMAND (1):
+                ON_DEMAND backups that were triggered by the customer (e.g.,
+                not AUTOMATED).
+            AUTOMATED (2):
+                AUTOMATED backups triggered by the automated
+                backups scheduler pursuant to an automated
+                backup policy.
+            CONTINUOUS (3):
+                CONTINUOUS backups triggered by the automated
+                backups scheduler due to a continuous backup
+                policy.
+        """
+        TYPE_UNSPECIFIED = 0
+        ON_DEMAND = 1
+        AUTOMATED = 2
+        CONTINUOUS = 3
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    uid: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+    delete_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=15,
+        message=timestamp_pb2.Timestamp,
+    )
+    labels: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=6,
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=State,
+    )
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=8,
+        enum=Type,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+    cluster_uid: str = proto.Field(
+        proto.STRING,
+        number=18,
+    )
+    cluster_name: str = proto.Field(
+        proto.STRING,
+        number=10,
+    )
+    reconciling: bool = proto.Field(
+        proto.BOOL,
+        number=11,
+    )
+    encryption_config: "EncryptionConfig" = proto.Field(
+        proto.MESSAGE,
+        number=12,
+        message="EncryptionConfig",
+    )
+    encryption_info: "EncryptionInfo" = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message="EncryptionInfo",
+    )
+    etag: str = proto.Field(
+        proto.STRING,
+        number=14,
+    )
+    annotations: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=16,
+    )
+    size_bytes: int = proto.Field(
+        proto.INT64,
+        number=17,
+    )
+    expiry_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=19,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class SupportedDatabaseFlag(proto.Message):
+    r"""SupportedDatabaseFlag gives general information about a database
+    flag, like type and allowed values. This is a static value that is
+    defined on the server side, and it cannot be modified by callers. To
+    set the Database flags on a particular Instance, a caller should
+    modify the Instance.database_flags field.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        string_restrictions (google.cloud.alloydb_v1.types.SupportedDatabaseFlag.StringRestrictions):
+            Restriction on STRING type value.
+
+            This field is a member of `oneof`_ ``restrictions``.
+        integer_restrictions (google.cloud.alloydb_v1.types.SupportedDatabaseFlag.IntegerRestrictions):
+            Restriction on INTEGER type value.
+
+            This field is a member of `oneof`_ ``restrictions``.
+        name (str):
+            The name of the flag resource, following Google Cloud
+            conventions, e.g.:
+
+            - projects/{project}/locations/{location}/flags/{flag} This
+              field currently has no semantic meaning.
+        flag_name (str):
+            The name of the database flag, e.g. "max_allowed_packets".
+            This is a possible key for the Instance.database_flags map
+            field.
+        value_type (google.cloud.alloydb_v1.types.SupportedDatabaseFlag.ValueType):
+
+        accepts_multiple_values (bool):
+            Whether the database flag accepts multiple
+            values. If true, a comma-separated list of
+            stringified values may be specified.
+ supported_db_versions (MutableSequence[google.cloud.alloydb_v1.types.DatabaseVersion]):
+ Major database engine versions for which this
+ flag is supported.
+ requires_db_restart (bool):
+ Whether setting or updating this flag on an
+ Instance requires a database restart. If a flag
+ that requires database restart is set, the
+ backend will automatically restart the database
+ (making sure to satisfy any availability SLOs).
+ """
+
+ class ValueType(proto.Enum):
+ r"""ValueType describes the semantic type of the value that the flag
+ accepts. Regardless of the ValueType, the Instance.database_flags
+ field accepts the stringified version of the value, i.e. "20" or
+ "3.14".
+
+ Values:
+ VALUE_TYPE_UNSPECIFIED (0):
+ This is an unknown flag type.
+ STRING (1):
+ String type flag.
+ INTEGER (2):
+ Integer type flag.
+ FLOAT (3):
+ Float type flag.
+ NONE (4):
+ Denotes that the flag does not accept any
+ values.
+ """
+ VALUE_TYPE_UNSPECIFIED = 0
+ STRING = 1
+ INTEGER = 2
+ FLOAT = 3
+ NONE = 4
+
+ class StringRestrictions(proto.Message):
+ r"""Restrictions on STRING type values
+
+ Attributes:
+ allowed_values (MutableSequence[str]):
+ The list of allowed values, if bounded. This
+ field will be empty if there is an unbounded
+ number of allowed values.
+ """
+
+ allowed_values: MutableSequence[str] = proto.RepeatedField(
+ proto.STRING,
+ number=1,
+ )
+
+ class IntegerRestrictions(proto.Message):
+ r"""Restrictions on INTEGER type values.
+
+ Attributes:
+ min_value (google.protobuf.wrappers_pb2.Int64Value):
+ The minimum value that can be specified, if
+ applicable.
+ max_value (google.protobuf.wrappers_pb2.Int64Value):
+ The maximum value that can be specified, if
+ applicable.
+ """
+
+ min_value: wrappers_pb2.Int64Value = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message=wrappers_pb2.Int64Value,
+ )
+ max_value: wrappers_pb2.Int64Value = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=wrappers_pb2.Int64Value,
+ )
+
+ string_restrictions: StringRestrictions = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="restrictions",
+ message=StringRestrictions,
+ )
+ integer_restrictions: IntegerRestrictions = proto.Field(
+ proto.MESSAGE,
+ number=8,
+ oneof="restrictions",
+ message=IntegerRestrictions,
+ )
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ flag_name: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ value_type: ValueType = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum=ValueType,
+ )
+ accepts_multiple_values: bool = proto.Field(
+ proto.BOOL,
+ number=4,
+ )
+ supported_db_versions: MutableSequence["DatabaseVersion"] = proto.RepeatedField(
+ proto.ENUM,
+ number=5,
+ enum="DatabaseVersion",
+ )
+ requires_db_restart: bool = proto.Field(
+ proto.BOOL,
+ number=6,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py
new file mode 100644
index 000000000000..a764b924f85b
--- /dev/null
+++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py
@@ -0,0 +1,1411 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.alloydb_v1.types import resources + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1", + manifest={ + "ListClustersRequest", + "ListClustersResponse", + "GetClusterRequest", + "CreateClusterRequest", + "UpdateClusterRequest", + "DeleteClusterRequest", + "RestoreClusterRequest", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "BatchCreateInstancesMetadata", + "BatchCreateInstanceStatus", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "FailoverInstanceRequest", + "RestartInstanceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "CreateBackupRequest", + "UpdateBackupRequest", + "DeleteBackupRequest", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "OperationMetadata", + }, +) + + +class ListClustersRequest(proto.Message): + r"""Message for requesting list of Clusters + + Attributes: + parent (str): + Required. The name of the parent resource. For the required + format, see the comment on the Cluster.name field. + Additionally, you can perform an aggregated list operation + by specifying a value with the following format: + + - projects/{project}/locations/- + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListClustersResponse(proto.Message): + r"""Message for response to listing Clusters + + Attributes: + clusters (MutableSequence[google.cloud.alloydb_v1.types.Cluster]): + The list of Cluster + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
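+
+ A minimal consumption sketch (illustrative, with a hypothetical
+ project ID), assuming an ``AlloyDBAdminClient`` named ``client``; the
+ generated list method returns a pager that follows
+ ``next_page_token`` transparently:
+
+ .. code-block:: python
+
+     from google.cloud import alloydb_v1
+
+     # The "-" wildcard performs an aggregated listing across locations.
+     request = alloydb_v1.ListClustersRequest(
+         parent="projects/my-project/locations/-",
+     )
+     for cluster in client.list_clusters(request=request):
+         print(cluster.name)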
+ """ + + @property + def raw_page(self): + return self + + clusters: MutableSequence[resources.Cluster] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Cluster, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetClusterRequest(proto.Message): + r"""Message for getting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateClusterRequest(proto.Message): + r"""Message for creating a Cluster + + Attributes: + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Cluster.name field. + cluster_id (str): + Required. ID of the requesting object. + cluster (google.cloud.alloydb_v1.types.Cluster): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateClusterRequest(proto.Message): + r"""Message for updating a Cluster + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + cluster (google.cloud.alloydb_v1.types.Cluster): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + update request. + allow_missing (bool): + Optional. If set to true, update succeeds even if cluster is + not found. In that case, a new cluster is created and + ``update_mask`` is ignored. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteClusterRequest(proto.Message): + r"""Message for deleting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Cluster. + If an etag is provided and does not match the + current etag of the Cluster, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + force (bool): + Optional. Whether to cascade delete child + instances for given cluster. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class RestoreClusterRequest(proto.Message): + r"""Message for restoring a Cluster from a backup or another + cluster at a given point in time. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_source (google.cloud.alloydb_v1.types.BackupSource): + Backup source. + + This field is a member of `oneof`_ ``source``. + parent (str): + Required. The name of the parent resource. 
+ For the required format, see the comment on the + Cluster.name field. + cluster_id (str): + Required. ID of the requesting object. + cluster (google.cloud.alloydb_v1.types.Cluster): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + import request. + """ + + backup_source: resources.BackupSource = proto.Field( + proto.MESSAGE, + number=4, + oneof="source", + message=resources.BackupSource, + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListInstancesRequest(proto.Message): + r"""Message for requesting list of Instances + + Attributes: + parent (str): + Required. The name of the parent resource. For the required + format, see the comment on the Instance.name field. + Additionally, you can perform an aggregated list operation + by specifying a value with one of the following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInstancesResponse(proto.Message): + r"""Message for response to listing Instances + + Attributes: + instances (MutableSequence[google.cloud.alloydb_v1.types.Instance]): + The list of Instance + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
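+
+ A minimal aggregated-listing sketch (illustrative, with a
+ hypothetical project and region), assuming an ``AlloyDBAdminClient``
+ named ``client``; the ``-`` wildcard spans all clusters in the
+ region:
+
+ .. code-block:: python
+
+     from google.cloud import alloydb_v1
+
+     request = alloydb_v1.ListInstancesRequest(
+         parent="projects/my-project/locations/us-central1/clusters/-",
+     )
+     for instance in client.list_instances(request=request):
+         print(instance.name, instance.state)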
+ """ + + @property + def raw_page(self): + return self + + instances: MutableSequence[resources.Instance] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Instance, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Message for getting a Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + view (google.cloud.alloydb_v1.types.InstanceView): + The view of the instance to return. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: resources.InstanceView = proto.Field( + proto.ENUM, + number=2, + enum=resources.InstanceView, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Message for creating a Instance + + Attributes: + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Instance.name field. + instance_id (str): + Required. ID of the requesting object. + instance (google.cloud.alloydb_v1.types.Instance): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CreateInstanceRequests(proto.Message): + r"""See usage below for notes. + + Attributes: + create_instance_requests (MutableSequence[google.cloud.alloydb_v1.types.CreateInstanceRequest]): + Required. Primary and read replica instances + to be created. This list should not be empty. + """ + + create_instance_requests: MutableSequence[ + "CreateInstanceRequest" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CreateInstanceRequest", + ) + + +class BatchCreateInstancesRequest(proto.Message): + r"""Message for creating a batch of instances under the specified + cluster. + + Attributes: + parent (str): + Required. The name of the parent resource. + requests (google.cloud.alloydb_v1.types.CreateInstanceRequests): + Required. Resources being created. + request_id (str): + Optional. An optional request ID to identify + requests. 
Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+ For example, consider a situation where you make
+ an initial request and the request times out. If
+ you make the request again with the same request
+ ID, the server can check if original operation
+ with the same request ID was received, and if
+ so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ requests: "CreateInstanceRequests" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message="CreateInstanceRequests",
+ )
+ request_id: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+
+class BatchCreateInstancesResponse(proto.Message):
+ r"""Message for creating batches of instances in a cluster.
+
+ Attributes:
+ instances (MutableSequence[google.cloud.alloydb_v1.types.Instance]):
+ Created instances.
+ """
+
+ instances: MutableSequence[resources.Instance] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message=resources.Instance,
+ )
+
+
+class BatchCreateInstancesMetadata(proto.Message):
+ r"""Message for metadata that is specific to BatchCreateInstances
+ API.
+
+ Attributes:
+ instance_targets (MutableSequence[str]):
+ The instances being created in the API call.
+ Each string in this list is the server defined
+ resource path for target instances in the
+ request and for the format of each string, see
+ the comment on the Instance.name field.
+ instance_statuses (MutableMapping[str, google.cloud.alloydb_v1.types.BatchCreateInstanceStatus]):
+ A map representing state of the instances involved in the
+ BatchCreateInstances operation during the operation
+ execution. The instance state will be in STATE_UNSPECIFIED
+ state if the instance has not yet been picked up for
+ processing. The key of the map is the name of the instance
+ resource. For the format, see the comment on the
+ Instance.name field.
+ """
+
+ instance_targets: MutableSequence[str] = proto.RepeatedField(
+ proto.STRING,
+ number=1,
+ )
+ instance_statuses: MutableMapping[
+ str, "BatchCreateInstanceStatus"
+ ] = proto.MapField(
+ proto.STRING,
+ proto.MESSAGE,
+ number=2,
+ message="BatchCreateInstanceStatus",
+ )
+
+
+class BatchCreateInstanceStatus(proto.Message):
+ r"""Message for current status of an instance in the
+ BatchCreateInstances operation. For example, let's say a
+ BatchCreateInstances workflow has 4 instances, Instance1 through
+ Instance4. Let's also assume that 2 instances succeeded but the
+ third failed to create and the 4th was never picked up for creation
+ because of the failure of the previous one. Then, the resulting
+ states would look something like:
+
+ 1. Instance1 = ROLLED_BACK
+ 2. Instance2 = ROLLED_BACK
+ 3. Instance3 = FAILED
+ 4. Instance4 = FAILED
+
+ However, while the operation is running, the instance might be in
+ other states including PENDING_CREATE, ACTIVE, DELETING and
+ CREATING. The states do not get further updated once the operation
+ is done.
+
+ Attributes:
+ state (google.cloud.alloydb_v1.types.BatchCreateInstanceStatus.State):
+ The current state of an instance involved in the batch
+ create operation. Once the operation is complete, the final
+ state of the instances in the LRO can be one of:
+
+ 1. ACTIVE, indicating that instances were created
+ successfully
+ 2. FAILED, indicating that a particular instance failed
+ creation
+ 3. ROLLED_BACK, indicating that although the instance was
+ created successfully, it had to be rolled back and
+ deleted due to failure in other steps of the workflow.
+ error_msg (str):
+ DEPRECATED - Use the error field instead.
+ Error, if any error occurred and is available,
+ during instance creation.
+ error (google.rpc.status_pb2.Status):
+ The RPC status of the instance creation
+ operation. This field will be present if an
+ error happened during the instance creation.
+ type_ (google.cloud.alloydb_v1.types.Instance.InstanceType):
+
+ """
+
+ class State(proto.Enum):
+ r"""State contains all valid instance states for the
+ BatchCreateInstances operation. This is mainly used for status
+ reporting through the LRO metadata.
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ The state of the instance is unknown.
+ PENDING_CREATE (1):
+ Instance is pending creation and has not yet
+ been picked up for processing in the backend.
+ READY (2):
+ The instance is active and running.
+ CREATING (3):
+ The instance is being created.
+ DELETING (4):
+ The instance is being deleted.
+ FAILED (5):
+ The creation of the instance failed or a
+ fatal error occurred during an operation on the
+ instance or a batch of instances.
+ ROLLED_BACK (6):
+ The instance was created successfully, but
+ was rolled back and deleted due to some other
+ failure during the BatchCreateInstances
+ operation.
+ """
+ STATE_UNSPECIFIED = 0
+ PENDING_CREATE = 1
+ READY = 2
+ CREATING = 3
+ DELETING = 4
+ FAILED = 5
+ ROLLED_BACK = 6
+
+ state: State = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum=State,
+ )
+ error_msg: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ error: status_pb2.Status = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=status_pb2.Status,
+ )
+ type_: resources.Instance.InstanceType = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum=resources.Instance.InstanceType,
+ )
+
+
+class UpdateInstanceRequest(proto.Message):
+ r"""Message for updating an Instance
+
+ Attributes:
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Optional. Field mask is used to specify the fields to be
+ overwritten in the Instance resource by the update. The
+ fields specified in the update_mask are relative to the
+ resource, not the full request. A field will be overwritten
+ if it is in the mask. If the user does not provide a mask
+ then all fields will be overwritten.
+ instance (google.cloud.alloydb_v1.types.Instance):
+ Required. The resource being updated
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+ For example, consider a situation where you make
+ an initial request and the request times out. If
+ you make the request again with the same request
+ ID, the server can check if original operation
+ with the same request ID was received, and if
+ so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ validate_only (bool):
+ Optional.
If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + update request. + allow_missing (bool): + Optional. If set to true, update succeeds even if instance + is not found. In that case, a new instance is created and + ``update_mask`` is ignored. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Message for deleting a Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Instance. + If an etag is provided and does not match the + current etag of the Instance, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class FailoverInstanceRequest(proto.Message): + r"""Message for triggering failover on an Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. 
If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + failover. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class RestartInstanceRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + restart. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListBackupsRequest(proto.Message): + r"""Message for requesting list of Backups + + Attributes: + parent (str): + Required. Parent value for ListBackupsRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupsResponse(proto.Message): + r"""Message for response to listing Backups + + Attributes: + backups (MutableSequence[google.cloud.alloydb_v1.types.Backup]): + The list of Backup + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence[resources.Backup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Backup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Message for getting a Backup + + Attributes: + name (str): + Required. 
Name of the resource
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class CreateBackupRequest(proto.Message):
+ r"""Message for creating a Backup
+
+ Attributes:
+ parent (str):
+ Required. Value for parent.
+ backup_id (str):
+ Required. ID of the requesting object.
+ backup (google.cloud.alloydb_v1.types.Backup):
+ Required. The resource being created
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+ For example, consider a situation where you make
+ an initial request and the request times out. If
+ you make the request again with the same request
+ ID, the server can check if original operation
+ with the same request ID was received, and if
+ so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ validate_only (bool):
+ Optional. If set, the backend validates the
+ request, but doesn't actually execute it.
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ backup_id: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ backup: resources.Backup = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message=resources.Backup,
+ )
+ request_id: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ validate_only: bool = proto.Field(
+ proto.BOOL,
+ number=5,
+ )
+
+
+class UpdateBackupRequest(proto.Message):
+ r"""Message for updating a Backup
+
+ Attributes:
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Optional. Field mask is used to specify the fields to be
+ overwritten in the Backup resource by the update. The fields
+ specified in the update_mask are relative to the resource,
+ not the full request. A field will be overwritten if it is
+ in the mask. If the user does not provide a mask then all
+ fields will be overwritten.
+ backup (google.cloud.alloydb_v1.types.Backup):
+ Required. The resource being updated
+ request_id (str):
+ Optional. An optional request ID to identify
+ requests. Specify a unique request ID so that if
+ you must retry your request, the server will
+ know to ignore the request if it has already
+ been completed. The server will guarantee that
+ for at least 60 minutes since the first request.
+ For example, consider a situation where you make
+ an initial request and the request times out. If
+ you make the request again with the same request
+ ID, the server can check if original operation
+ with the same request ID was received, and if
+ so, will ignore the second request. This
+ prevents clients from accidentally creating
+ duplicate commitments.
+ The request ID must be a valid UUID with the
+ exception that zero UUID is not supported
+ (00000000-0000-0000-0000-000000000000).
+ validate_only (bool):
+ Optional. If set, the backend validates the
+ request, but doesn't actually execute it.
+ allow_missing (bool):
+ Optional. If set to true, update succeeds even if the backup
+ is not found. In that case, a new backup is created and
+ ``update_mask`` is ignored.
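+
+ A minimal masked-update sketch (illustrative, with a hypothetical
+ backup name), assuming an ``AlloyDBAdminClient`` named ``client``;
+ only the paths listed in ``update_mask`` are overwritten, and the
+ method returns a long-running operation:
+
+ .. code-block:: python
+
+     from google.cloud import alloydb_v1
+     from google.protobuf import field_mask_pb2
+
+     request = alloydb_v1.UpdateBackupRequest(
+         backup=alloydb_v1.Backup(
+             name="projects/my-project/locations/us-central1/backups/my-backup",
+             display_name="nightly",
+         ),
+         update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+     )
+     # Block until the LRO completes and returns the updated Backup.
+     backup = client.update_backup(request=request).result()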
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: resources.Backup = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Backup, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Message for deleting a Backup + + Attributes: + name (str): + Required. Name of the resource. For the + required format, see the comment on the + Backup.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, the backend validates the + request, but doesn't actually execute it. + etag (str): + Optional. The current etag of the Backup. + If an etag is provided and does not match the + current etag of the Backup, deletion will be + blocked and an ABORTED error will be returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSupportedDatabaseFlagsRequest(proto.Message): + r"""Message for listing the information about the supported + Database flags. + + Attributes: + parent (str): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location} + + Regardless of the parent specified here, as long it is + contains a valid project and location, the service will + return a static list of supported flags resources. Note that + we do not yet support region-specific flags. + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSupportedDatabaseFlagsResponse(proto.Message): + r"""Message for response to listing SupportedDatabaseFlags. + + Attributes: + supported_database_flags (MutableSequence[google.cloud.alloydb_v1.types.SupportedDatabaseFlag]): + The list of SupportedDatabaseFlags. + next_page_token (str): + A token identifying a page of results the + server should return. 
+ """ + + @property + def raw_page(self): + return self + + supported_database_flags: MutableSequence[ + resources.SupportedDatabaseFlag + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.SupportedDatabaseFlag, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + batch_create_instances_metadata (google.cloud.alloydb_v1.types.BatchCreateInstancesMetadata): + Output only. BatchCreateInstances related + metadata. + + This field is a member of `oneof`_ ``request_specific``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + batch_create_instances_metadata: "BatchCreateInstancesMetadata" = proto.Field( + proto.MESSAGE, + number=8, + oneof="request_specific", + message="BatchCreateInstancesMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py new file mode 100644 index 000000000000..1a6c3f60cfa0 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.alloydb_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.alloy_db_admin import AlloyDBAdminAsyncClient, AlloyDBAdminClient +from .types.resources import ( + AutomatedBackupPolicy, + Backup, + BackupSource, + Cluster, + ConnectionInfo, + ContinuousBackupConfig, + ContinuousBackupInfo, + ContinuousBackupSource, + DatabaseVersion, + EncryptionConfig, + EncryptionInfo, + Instance, + InstanceView, + MigrationSource, + SslConfig, + SupportedDatabaseFlag, + UserPassword, +) +from .types.service import ( + BatchCreateInstancesMetadata, + BatchCreateInstancesRequest, + BatchCreateInstancesResponse, + BatchCreateInstanceStatus, + CreateBackupRequest, + CreateClusterRequest, + CreateInstanceRequest, + CreateInstanceRequests, + CreateSecondaryClusterRequest, + CreateSecondaryInstanceRequest, + DeleteBackupRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + FailoverInstanceRequest, + GenerateClientCertificateRequest, + GenerateClientCertificateResponse, + GetBackupRequest, + GetClusterRequest, + GetConnectionInfoRequest, + GetInstanceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListClustersRequest, + ListClustersResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSupportedDatabaseFlagsRequest, + ListSupportedDatabaseFlagsResponse, + OperationMetadata, + PromoteClusterRequest, + RestartInstanceRequest, + RestoreClusterRequest, + UpdateBackupRequest, + UpdateClusterRequest, + UpdateInstanceRequest, +) + +__all__ = ( + "AlloyDBAdminAsyncClient", + "AlloyDBAdminClient", + "AutomatedBackupPolicy", + "Backup", + "BackupSource", + "BatchCreateInstanceStatus", + "BatchCreateInstancesMetadata", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "Cluster", + "ConnectionInfo", + "ContinuousBackupConfig", + "ContinuousBackupInfo", + "ContinuousBackupSource", + "CreateBackupRequest", + "CreateClusterRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "CreateSecondaryClusterRequest", + "CreateSecondaryInstanceRequest", + "DatabaseVersion", + "DeleteBackupRequest", + "DeleteClusterRequest", + "DeleteInstanceRequest", + "EncryptionConfig", + "EncryptionInfo", + "FailoverInstanceRequest", + "GenerateClientCertificateRequest", + "GenerateClientCertificateResponse", + "GetBackupRequest", + "GetClusterRequest", + "GetConnectionInfoRequest", + "GetInstanceRequest", + "Instance", + "InstanceView", + "ListBackupsRequest", + "ListBackupsResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "MigrationSource", + "OperationMetadata", + "PromoteClusterRequest", + "RestartInstanceRequest", + "RestoreClusterRequest", + "SslConfig", + "SupportedDatabaseFlag", + "UpdateBackupRequest", + "UpdateClusterRequest", + "UpdateInstanceRequest", + "UserPassword", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..11cb5f713b47 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json @@ -0,0 +1,403 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.alloydb_v1alpha", + "protoPackage": "google.cloud.alloydb.v1alpha", + "schema": "1.0", + 
"services": { + "AlloyDBAdmin": { + "clients": { + "grpc": { + "libraryClient": "AlloyDBAdminClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateSecondaryCluster": { + "methods": [ + "create_secondary_cluster" + ] + }, + "CreateSecondaryInstance": { + "methods": [ + "create_secondary_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GenerateClientCertificate": { + "methods": [ + "generate_client_certificate" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConnectionInfo": { + "methods": [ + "get_connection_info" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "PromoteCluster": { + "methods": [ + "promote_cluster" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AlloyDBAdminAsyncClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateSecondaryCluster": { + "methods": [ + "create_secondary_cluster" + ] + }, + "CreateSecondaryInstance": { + "methods": [ + "create_secondary_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GenerateClientCertificate": { + "methods": [ + "generate_client_certificate" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConnectionInfo": { + "methods": [ + "get_connection_info" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "PromoteCluster": { + "methods": [ + "promote_cluster" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + 
"UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "rest": { + "libraryClient": "AlloyDBAdminClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateSecondaryCluster": { + "methods": [ + "create_secondary_cluster" + ] + }, + "CreateSecondaryInstance": { + "methods": [ + "create_secondary_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GenerateClientCertificate": { + "methods": [ + "generate_client_certificate" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConnectionInfo": { + "methods": [ + "get_connection_info" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "PromoteCluster": { + "methods": [ + "promote_cluster" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py new file mode 100644 index 000000000000..1b461870ca63 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/py.typed b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/py.typed new file mode 100644 index 000000000000..c71d0f20b482 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-alloydb package uses inline types. 
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/__init__.py new file mode 100644 index 000000000000..1acb07113321 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AlloyDBAdminAsyncClient +from .client import AlloyDBAdminClient + +__all__ = ( + "AlloyDBAdminClient", + "AlloyDBAdminAsyncClient", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py new file mode 100644 index 000000000000..3118b9d42432 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py @@ -0,0 +1,3722 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.alloydb_v1alpha.services.alloy_db_admin import pagers +from google.cloud.alloydb_v1alpha.types import resources, service + +from .client import AlloyDBAdminClient +from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport +from .transports.grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport + + +class AlloyDBAdminAsyncClient: + """Service describing handlers for resources""" + + _client: AlloyDBAdminClient + + DEFAULT_ENDPOINT = AlloyDBAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AlloyDBAdminClient.DEFAULT_MTLS_ENDPOINT + + backup_path = staticmethod(AlloyDBAdminClient.backup_path) + parse_backup_path = staticmethod(AlloyDBAdminClient.parse_backup_path) + cluster_path = staticmethod(AlloyDBAdminClient.cluster_path) + parse_cluster_path = staticmethod(AlloyDBAdminClient.parse_cluster_path) + connection_info_path = staticmethod(AlloyDBAdminClient.connection_info_path) + parse_connection_info_path = staticmethod( + AlloyDBAdminClient.parse_connection_info_path + ) + crypto_key_version_path = staticmethod(AlloyDBAdminClient.crypto_key_version_path) + parse_crypto_key_version_path = staticmethod( + AlloyDBAdminClient.parse_crypto_key_version_path + ) + instance_path = staticmethod(AlloyDBAdminClient.instance_path) + parse_instance_path = staticmethod(AlloyDBAdminClient.parse_instance_path) + network_path = staticmethod(AlloyDBAdminClient.network_path) + parse_network_path = staticmethod(AlloyDBAdminClient.parse_network_path) + supported_database_flag_path = staticmethod( + AlloyDBAdminClient.supported_database_flag_path + ) + parse_supported_database_flag_path = staticmethod( + AlloyDBAdminClient.parse_supported_database_flag_path + ) + common_billing_account_path = staticmethod( + AlloyDBAdminClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AlloyDBAdminClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AlloyDBAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(AlloyDBAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(AlloyDBAdminClient.common_organization_path) + parse_common_organization_path = staticmethod( + 
AlloyDBAdminClient.parse_common_organization_path + ) + common_project_path = staticmethod(AlloyDBAdminClient.common_project_path) + parse_common_project_path = staticmethod( + AlloyDBAdminClient.parse_common_project_path + ) + common_location_path = staticmethod(AlloyDBAdminClient.common_location_path) + parse_common_location_path = staticmethod( + AlloyDBAdminClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlloyDBAdminAsyncClient: The constructed client. + """ + return AlloyDBAdminClient.from_service_account_info.__func__(AlloyDBAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlloyDBAdminAsyncClient: The constructed client. + """ + return AlloyDBAdminClient.from_service_account_file.__func__(AlloyDBAdminAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AlloyDBAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AlloyDBAdminTransport: + """Returns the transport used by the client instance. + + Returns: + AlloyDBAdminTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AlloyDBAdminTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the alloy db admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AlloyDBAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AlloyDBAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_clusters( + self, + request: Optional[Union[service.ListClustersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersAsyncPager: + r"""Lists Clusters in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_list_clusters(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.ListClustersRequest, dict]]): + The request object. 
Message for requesting list of + Clusters + parent (:class:`str`): + Required. The name of the parent resource. For the + required format, see the comment on the Cluster.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with the following + format: + + - projects/{project}/locations/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListClustersAsyncPager: + Message for response to listing + Clusters + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListClustersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_cluster( + self, + request: Optional[Union[service.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Gets details of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_get_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.GetClusterRequest, dict]]): + The request object. Message for getting a Cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cluster( + self, + request: Optional[Union[service.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_create_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.CreateClusterRequest, dict]]): + The request object. Message for creating a Cluster + parent (:class:`str`): + Required. The name of the parent + resource. For the required format, see + the comment on the Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.alloydb_v1alpha.types.Cluster`): + Required. The resource being created + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
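# Aside, as a sketch of the retry policy in play here: the wrap below sets
# default_timeout=None and attaches no default retry, unlike list_clusters
# above, which retries ServiceUnavailable with exponential backoff
# (initial=1.0s, multiplier=1.3, maximum=60.0s). create_cluster mutates
# state, so it is presumably left non-retryable by default; callers who
# want retries can pass `retry=` on the call itself.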
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_cluster( + self, + request: Optional[Union[service.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_update_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.UpdateClusterRequest, dict]]): + The request object. Message for updating a Cluster + cluster (:class:`google.cloud.alloydb_v1alpha.types.Cluster`): + Required. The resource being updated + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_cluster( + self, + request: Optional[Union[service.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_delete_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.DeleteClusterRequest, dict]]): + The request object. Message for deleting a Cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def promote_cluster( + self, + request: Optional[Union[service.PromoteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_promote_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.PromoteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.promote_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.PromoteClusterRequest, dict]]): + The request object. Message for promoting a Cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.PromoteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.promote_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
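# The AsyncOperation built above resolves to resources.Cluster. Per the
# sample in this method's own docstring, a caller drives it like so:
#   operation = client.promote_cluster(request=request)
#   response = (await operation).result()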
+ return response + + async def restore_cluster( + self, + request: Optional[Union[service.RestoreClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_restore_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup_source = alloydb_v1alpha.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.RestoreClusterRequest, dict]]): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + request = service.RestoreClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restore_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_secondary_cluster( + self, + request: Optional[Union[service.CreateSecondaryClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_create_secondary_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_secondary_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.CreateSecondaryClusterRequest, dict]]): + The request object. + parent (:class:`str`): + Required. The name of the parent + resource (the primary cluster). For the + required format, see the comment on the + Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.alloydb_v1alpha.types.Cluster`): + Required. Configuration of the + requesting object (the secondary + cluster). + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. ID of the requesting object + (the secondary cluster). + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateSecondaryClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_secondary_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_instances( + self, + request: Optional[Union[service.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_list_instances(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.ListInstancesRequest, dict]]): + The request object. Message for requesting list of + Instances + parent (:class:`str`): + Required. The name of the parent resource. For the + required format, see the comment on the Instance.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with one of the + following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListInstancesAsyncPager: + Message for response to listing + Instances + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance( + self, + request: Optional[Union[service.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Instance: + r"""Gets details of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_get_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.GetInstanceRequest, dict]]): + The request object. Message for getting a Instance + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.Instance: + An Instance is a computing unit that + an end customer can connect to. It's the + main unit of computing resources in + AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_instance( + self, + request: Optional[Union[service.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Instance in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_create_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.CreateInstanceRequest, dict]]): + The request object. Message for creating a Instance + parent (:class:`str`): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.alloydb_v1alpha.types.Instance`): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_secondary_instance( + self, + request: Optional[Union[service.CreateSecondaryInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new SECONDARY Instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_create_secondary_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.CreateSecondaryInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_secondary_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.CreateSecondaryInstanceRequest, dict]]): + The request object. Message for creating a Secondary + Instance + parent (:class:`str`): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.alloydb_v1alpha.types.Instance`): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. 
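# As with the other flattened methods in this client, `request` and the
# individual field arguments (parent, instance, instance_id) are mutually
# exclusive: the quick check below raises ValueError when both are supplied,
# so callers pick one calling style or the other.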
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, instance, instance_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = service.CreateSecondaryInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if instance is not None:
+ request.instance = instance
+ if instance_id is not None:
+ request.instance_id = instance_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_secondary_instance,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ resources.Instance,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def batch_create_instances(
+ self,
+ request: Optional[Union[service.BatchCreateInstancesRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates new instances under the given project,
+ location and cluster. There can be only one primary
+ instance in a cluster. If the primary instance exists in
+ the cluster as well as this request, then the API will
+ throw an error.
+ The primary instance should exist before any read pool
+ instance is created. If the primary instance is a part
+ of the request payload, then the API will take care of
+ creating instances in the correct order. This method is
+ here to support Google-internal use cases, and is not
+ meant for external customers to consume. Please do not
+ start relying on it; its behavior is subject to change
+ without notice.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_batch_create_instances(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + requests = alloydb_v1alpha.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.BatchCreateInstancesRequest, dict]]): + The request object. Message for creating a batch of + instances under the specified cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1alpha.types.BatchCreateInstancesResponse` + Message for creating batches of instances in a cluster. + + """ + # Create or coerce a protobuf request object. + request = service.BatchCreateInstancesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_instances, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + service.BatchCreateInstancesResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_instance( + self, + request: Optional[Union[service.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[resources.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ async def sample_update_instance():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+ # Initialize request argument(s)
+ instance = alloydb_v1alpha.Instance()
+ instance.instance_type = "SECONDARY"
+
+ request = alloydb_v1alpha.UpdateInstanceRequest(
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.update_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.alloydb_v1alpha.types.UpdateInstanceRequest, dict]]):
+ The request object. Message for updating an Instance
+ instance (:class:`google.cloud.alloydb_v1alpha.types.Instance`):
+ Required. The resource being updated
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+ Optional. Field mask is used to specify the fields to be
+ overwritten in the Instance resource by the update. The
+ fields specified in the update_mask are relative to the
+ resource, not the full request. A field will be
+ overwritten if it is in the mask. If the user does not
+ provide a mask then all fields will be overwritten.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to.
+ It's the main unit of computing resources in AlloyDB.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([instance, update_mask])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = service.UpdateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if instance is not None:
+ request.instance = instance
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_instance,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("instance.name", request.instance.name),)
+ ),
+ )
+
+ # Send the request.
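+ # `metadata` now carries the x-goog-request-params routing header built
+ # from `instance.name`, which the service uses to route the call to the
+ # correct regional backend.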
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ resources.Instance,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_instance(
+ self,
+ request: Optional[Union[service.DeleteInstanceRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Deletes a single Instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ async def sample_delete_instance():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = alloydb_v1alpha.DeleteInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.alloydb_v1alpha.types.DeleteInstanceRequest, dict]]):
+ The request object. Message for deleting an Instance
+ name (:class:`str`):
+ Required. The name of the resource.
+ For the required format, see the comment
+ on the Instance.name field.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
+
+ }
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = service.DeleteInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
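+ # Mutating RPCs such as delete_instance are given no default retry below
+ # (default_timeout=None and no default_retry); pass an explicit `retry`
+ # argument to opt in to retries on a per-call basis.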
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def failover_instance( + self, + request: Optional[Union[service.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_failover_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.FailoverInstanceRequest, dict]]): + The request object. Message for triggering failover on an + Instance + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.FailoverInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.failover_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def restart_instance( + self, + request: Optional[Union[service.RestartInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restart an Instance in a cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_restart_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.RestartInstanceRequest, dict]]): + The request object. + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
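+ # Illustrative calling conventions (the resource name is a placeholder):
+ #   await client.restart_instance(
+ #       name="projects/p/locations/l/clusters/c/instances/i")
+ # or, equivalently, via a request object or dict:
+ #   await client.restart_instance(
+ #       request={"name": "projects/p/locations/l/clusters/c/instances/i"})
+ # Mixing `request` with flattened arguments raises ValueError, as below.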
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.RestartInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restart_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backups( + self, + request: Optional[Union[service.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_list_backups(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.ListBackupsRequest, dict]]): + The request object. Message for requesting list of + Backups + parent (:class:`str`): + Required. Parent value for + ListBackupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListBackupsAsyncPager: + Message for response to listing + Backups + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
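+ # Only `parent` is exposed as a flattened argument; paging fields such as
+ # `page_size` and `page_token` follow the standard List-method shape and
+ # are set on the request object itself.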
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: Optional[Union[service.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Gets details of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_get_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.GetBackupRequest, dict]]): + The request object. Message for getting a Backup + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.Backup: + Message describing Backup object + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
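+ # Unlike the mutating RPCs above, this read-only call is wrapped below
+ # with a default retry: exponential backoff on ServiceUnavailable with a
+ # 60-second deadline.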
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup( + self, + request: Optional[Union[service.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[resources.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Backup in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_create_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup = alloydb_v1alpha.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1alpha.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.CreateBackupRequest, dict]]): + The request object. Message for creating a Backup + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`google.cloud.alloydb_v1alpha.types.Backup`): + Required. The resource being created + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1alpha.types.Backup` + Message describing Backup object + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_backup, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_backup( + self, + request: Optional[Union[service.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[resources.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_update_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup = alloydb_v1alpha.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1alpha.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.UpdateBackupRequest, dict]]): + The request object. Message for updating a Backup + backup (:class:`google.cloud.alloydb_v1alpha.types.Backup`): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1alpha.types.Backup` + Message describing Backup object + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_backup, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
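+ # `operation_async.from_gapic` wraps the raw long-running Operation proto
+ # in an AsyncOperation future; its result() deserializes into
+ # resources.Backup and its metadata into service.OperationMetadata.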
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backup( + self, + request: Optional[Union[service.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_delete_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.DeleteBackupRequest, dict]]): + The request object. Message for deleting a Backup + name (:class:`str`): + Required. Name of the resource. For + the required format, see the comment on + the Backup.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
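+ # The operation future produced below resolves to empty_pb2.Empty:
+ # success is signaled by completion of the long-running operation rather
+ # than by a response payload.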
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_backup,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ empty_pb2.Empty,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_supported_database_flags(
+ self,
+ request: Optional[
+ Union[service.ListSupportedDatabaseFlagsRequest, dict]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListSupportedDatabaseFlagsAsyncPager:
+ r"""Lists SupportedDatabaseFlags for a given project and
+ location.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ async def sample_list_supported_database_flags():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = alloydb_v1alpha.ListSupportedDatabaseFlagsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_supported_database_flags(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsRequest, dict]]):
+ The request object. Message for listing the information
+ about the supported Database flags.
+ parent (:class:`str`):
+ Required. The name of the parent resource. The required
+ format is:
+
+ - projects/{project}/locations/{location}
+
+ Regardless of the parent specified here, as long as it
+ contains a valid project and location, the service will
+ return a static list of supported flags resources. Note
+ that we do not yet support region-specific flags.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsAsyncPager:
+ Message for response to listing
+ SupportedDatabaseFlags.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
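+ # Illustrative pager usage (the parent value is a placeholder); awaiting
+ # the call returns an async pager that fetches further pages lazily:
+ #   pager = await client.list_supported_database_flags(
+ #       parent="projects/p/locations/l")
+ #   async for flag in pager:
+ #       print(flag.name)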
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListSupportedDatabaseFlagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_supported_database_flags, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSupportedDatabaseFlagsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def generate_client_certificate( + self, + request: Optional[Union[service.GenerateClientCertificateRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GenerateClientCertificateResponse: + r"""Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.generate_client_certificate(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.GenerateClientCertificateRequest, dict]]): + The request object. Message for requests to generate a + client certificate signed by the Cluster + CA. + parent (:class:`str`): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location}/clusters/{cluster} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.GenerateClientCertificateResponse: + Message returned by a + GenerateClientCertificate operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GenerateClientCertificateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_client_certificate, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_connection_info( + self, + request: Optional[Union[service.GetConnectionInfoRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConnectionInfo: + r"""Get instance metadata used for a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_get_connection_info(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_connection_info(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.GetConnectionInfoRequest, dict]]): + The request object. Request message for + GetConnectionInfo. + parent (:class:`str`): + Required. The name of the parent + resource. The required format is: + projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.ConnectionInfo: + ConnectionInfo singleton resource. + https://google.aip.dev/156 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetConnectionInfoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_connection_info, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
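+ # Nothing is returned: the rpc resolves to Empty, and the call is awaited
+ # only so that any error surfaces to the caller.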
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AlloyDBAdminAsyncClient",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py new file mode 100644 index 000000000000..03938510bff2 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -0,0 +1,4004 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.alloydb_v1alpha.services.alloy_db_admin import pagers +from google.cloud.alloydb_v1alpha.types import resources, service + +from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport +from .transports.grpc import AlloyDBAdminGrpcTransport +from .transports.grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport +from .transports.rest import AlloyDBAdminRestTransport + + +class AlloyDBAdminClientMeta(type): + """Metaclass for the AlloyDBAdmin client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[AlloyDBAdminTransport]] + _transport_registry["grpc"] = AlloyDBAdminGrpcTransport + _transport_registry["grpc_asyncio"] = AlloyDBAdminGrpcAsyncIOTransport + _transport_registry["rest"] = AlloyDBAdminRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AlloyDBAdminTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AlloyDBAdminClient(metaclass=AlloyDBAdminClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
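+
+ For example (illustrative only; any ``*.googleapis.com`` host
+ follows the same pattern)::
+
+ _get_default_mtls_endpoint("alloydb.googleapis.com")
+ # -> "alloydb.mtls.googleapis.com"
+ _get_default_mtls_endpoint("alloydb.sandbox.googleapis.com")
+ # -> "alloydb.mtls.sandbox.googleapis.com"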
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "alloydb.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlloyDBAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AlloyDBAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AlloyDBAdminTransport: + """Returns the transport used by the client instance. + + Returns: + AlloyDBAdminTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def backup_path( + project: str, + location: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backups/{backup}".format( + project=project, + location=location, + backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def cluster_path( + project: str, + location: str, + cluster: str, + ) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def connection_info_path( + project: str, + location: str, + cluster: str, + instance: str, + ) -> str: + """Returns a fully-qualified connection_info string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}/connectionInfo".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + + @staticmethod + def parse_connection_info_path(path: str) -> Dict[str, str]: + """Parses a connection_info path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/instances/(?P.+?)/connectionInfo$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_version_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + crypto_key_version: str, + ) -> str: + """Returns a fully-qualified crypto_key_version string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + crypto_key_version=crypto_key_version, + ) + + @staticmethod + def parse_crypto_key_version_path(path: str) -> Dict[str, str]: + """Parses a crypto_key_version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)/cryptoKeyVersions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_path( + project: str, + location: str, + cluster: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + 
"""Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def supported_database_flag_path( + project: str, + location: str, + flag: str, + ) -> str: + """Returns a fully-qualified supported_database_flag string.""" + return "projects/{project}/locations/{location}/flags/{flag}".format( + project=project, + location=location, + flag=flag, + ) + + @staticmethod + def parse_supported_database_flag_path(path: str) -> Dict[str, str]: + """Parses a supported_database_flag path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/flags/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
+ otherwise use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Figure out the client cert source to use.
+ client_cert_source = None
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ client_cert_source = client_options.client_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = cls.DEFAULT_ENDPOINT
+
+ return api_endpoint, client_cert_source
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Optional[Union[str, AlloyDBAdminTransport]] = None,
+ client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the AlloyDB Admin client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, AlloyDBAdminTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if a client certificate is present; this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide a client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ client_options = cast(client_options_lib.ClientOptions, client_options)
+
+ api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
+ client_options
+ )
+
+ api_key_value = getattr(client_options, "api_key", None)
+ if api_key_value and credentials:
+ raise ValueError(
+ "client_options.api_key and credentials are mutually exclusive"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, AlloyDBAdminTransport):
+ # transport is an AlloyDBAdminTransport instance.
+ if credentials or client_options.credentials_file or api_key_value:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ import google.auth._default # type: ignore
+
+ if api_key_value and hasattr(
+ google.auth._default, "get_api_key_credentials"
+ ):
+ credentials = google.auth._default.get_api_key_credentials(
+ api_key_value
+ )
+
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ api_audience=client_options.api_audience,
+ )
+
+ def list_clusters(
+ self,
+ request: Optional[Union[service.ListClustersRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListClustersPager:
+ r"""Lists Clusters in a given project and location.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_list_clusters(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.ListClustersRequest, dict]): + The request object. Message for requesting list of + Clusters + parent (str): + Required. The name of the parent resource. For the + required format, see the comment on the Cluster.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with the following + format: + + - projects/{project}/locations/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListClustersPager: + Message for response to listing + Clusters + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListClustersRequest): + request = service.ListClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListClustersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_cluster( + self, + request: Optional[Union[service.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Gets details of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_get_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.GetClusterRequest, dict]): + The request object. Message for getting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetClusterRequest): + request = service.GetClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_cluster( + self, + request: Optional[Union[service.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_create_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.CreateClusterRequest, dict]): + The request object. Message for creating a Cluster + parent (str): + Required. The name of the parent + resource. For the required format, see + the comment on the Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.alloydb_v1alpha.types.Cluster): + Required. The resource being created + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateClusterRequest): + request = service.CreateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_cluster( + self, + request: Optional[Union[service.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_update_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.UpdateClusterRequest, dict]): + The request object. Message for updating a Cluster + cluster (google.cloud.alloydb_v1alpha.types.Cluster): + Required. The resource being updated + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateClusterRequest): + request = service.UpdateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cluster( + self, + request: Optional[Union[service.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_delete_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.DeleteClusterRequest, dict]): + The request object. Message for deleting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteClusterRequest): + request = service.DeleteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def promote_cluster( + self, + request: Optional[Union[service.PromoteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_promote_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.PromoteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.promote_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.PromoteClusterRequest, dict]): + The request object. Message for promoting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.PromoteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.PromoteClusterRequest): + request = service.PromoteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
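+ # (`_wrapped_methods` maps each raw transport method to a callable
+ # that already carries the default retry/timeout configuration
+ # declared on the transport.)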
+ rpc = self._transport._wrapped_methods[self._transport.promote_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def restore_cluster( + self, + request: Optional[Union[service.RestoreClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_restore_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + backup_source = alloydb_v1alpha.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.RestoreClusterRequest, dict]): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.RestoreClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
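+ # (RestoreClusterRequest exposes no flattened fields, so there is no
+ # keyword-argument check here; only the type coercion below is needed.)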
+ if not isinstance(request, service.RestoreClusterRequest): + request = service.RestoreClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_secondary_cluster( + self, + request: Optional[Union[service.CreateSecondaryClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_create_secondary_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_secondary_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.CreateSecondaryClusterRequest, dict]): + The request object. + parent (str): + Required. The name of the parent + resource (the primary cluster). For the + required format, see the comment on the + Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.alloydb_v1alpha.types.Cluster): + Required. Configuration of the + requesting object (the secondary + cluster). + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. ID of the requesting object + (the secondary cluster). + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateSecondaryClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateSecondaryClusterRequest): + request = service.CreateSecondaryClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_secondary_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_instances( + self, + request: Optional[Union[service.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_list_instances(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.ListInstancesRequest, dict]): + The request object. Message for requesting list of + Instances + parent (str): + Required. The name of the parent resource. For the + required format, see the comment on the Instance.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with one of the + following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListInstancesPager: + Message for response to listing + Instances + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListInstancesRequest): + request = service.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
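+ # Hedged usage sketch (not generated code; the parent below is a
+ # hypothetical aggregated-list value): iterating the pager transparently
+ # issues further ListInstances RPCs as each page is exhausted:
+ #
+ #   for instance in client.list_instances(
+ #       parent="projects/my-project/locations/-/clusters/-"
+ #   ):
+ #       print(instance.name)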
+ return response
+
+ def get_instance(
+ self,
+ request: Optional[Union[service.GetInstanceRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.Instance:
+ r"""Gets details of a single Instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ def sample_get_instance():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminClient()
+
+ # Initialize request argument(s)
+ request = alloydb_v1alpha.GetInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_instance(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.alloydb_v1alpha.types.GetInstanceRequest, dict]):
+ The request object. Message for getting an Instance
+ name (str):
+ Required. The name of the resource.
+ For the required format, see the comment
+ on the Instance.name field.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.alloydb_v1alpha.types.Instance:
+ An Instance is a computing unit that
+ an end customer can connect to. It's the
+ main unit of computing resources in
+ AlloyDB.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.GetInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.GetInstanceRequest):
+ request = service.GetInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
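+ # Illustrative sketch (not generated code; the resource name is
+ # hypothetical): the returned resources.Instance is a proto-plus message,
+ # so fields such as state and instance_type can be read directly:
+ #
+ #   inst = client.get_instance(
+ #       name="projects/p/locations/l/clusters/c/instances/i"
+ #   )
+ #   print(inst.state, inst.instance_type)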
+ return response
+
+ def create_instance(
+ self,
+ request: Optional[Union[service.CreateInstanceRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ instance: Optional[resources.Instance] = None,
+ instance_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates a new Instance in a given project and
+ location.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ def sample_create_instance():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminClient()
+
+ # Initialize request argument(s)
+ instance = alloydb_v1alpha.Instance()
+ instance.instance_type = "SECONDARY"
+
+ request = alloydb_v1alpha.CreateInstanceRequest(
+ parent="parent_value",
+ instance_id="instance_id_value",
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.create_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.alloydb_v1alpha.types.CreateInstanceRequest, dict]):
+ The request object. Message for creating an Instance
+ parent (str):
+ Required. The name of the parent
+ resource. For the required format, see
+ the comment on the Instance.name field.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (google.cloud.alloydb_v1alpha.types.Instance):
+ Required. The resource being created
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_id (str):
+ Required. ID of the requesting
+ object.
+
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to.
+ It's the main unit of computing resources in AlloyDB.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, instance, instance_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.CreateInstanceRequest.
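+ # (Hedged aside: a plain dict with the same fields, e.g. the hypothetical
+ # {"parent": "projects/p/locations/l/clusters/c", "instance_id": "i"}, is
+ # also accepted and coerced by the isinstance check below.)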
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateInstanceRequest): + request = service.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_secondary_instance( + self, + request: Optional[Union[service.CreateSecondaryInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new SECONDARY Instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_create_secondary_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.CreateSecondaryInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_secondary_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.CreateSecondaryInstanceRequest, dict]): + The request object. Message for creating a Secondary + Instance + parent (str): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.alloydb_v1alpha.types.Instance): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. ID of the requesting + object. 
+
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to.
+ It's the main unit of computing resources in AlloyDB.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, instance, instance_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.CreateSecondaryInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.CreateSecondaryInstanceRequest):
+ request = service.CreateSecondaryInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if instance is not None:
+ request.instance = instance
+ if instance_id is not None:
+ request.instance_id = instance_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[
+ self._transport.create_secondary_instance
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ resources.Instance,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def batch_create_instances(
+ self,
+ request: Optional[Union[service.BatchCreateInstancesRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates new instances under the given project,
+ location and cluster. There can be only one primary
+ instance in a cluster. If the primary instance exists in
+ the cluster as well as this request, then the API will
+ throw an error.
+ The primary instance should exist before any read pool
+ instance is created. If the primary instance is a part
+ of the request payload, then the API will take care of
+ creating instances in the correct order. This method is
+ here to support Google-internal use cases, and is not
+ meant for external customers to consume. Please do not
+ start relying on it; its behavior is subject to change
+ without notice.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ def sample_batch_create_instances():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminClient()
+
+ # Initialize request argument(s)
+ requests = alloydb_v1alpha.CreateInstanceRequests()
+ requests.create_instance_requests.parent = "parent_value"
+ requests.create_instance_requests.instance_id = "instance_id_value"
+ requests.create_instance_requests.instance.instance_type = "SECONDARY"
+
+ request = alloydb_v1alpha.BatchCreateInstancesRequest(
+ parent="parent_value",
+ requests=requests,
+ )
+
+ # Make the request
+ operation = client.batch_create_instances(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.alloydb_v1alpha.types.BatchCreateInstancesRequest, dict]):
+ The request object. Message for creating a batch of
+ instances under the specified cluster.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.alloydb_v1alpha.types.BatchCreateInstancesResponse`
+ Message for creating batches of instances in a cluster.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.BatchCreateInstancesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.BatchCreateInstancesRequest):
+ request = service.BatchCreateInstancesRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.batch_create_instances]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ service.BatchCreateInstancesResponse,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def update_instance(
+ self,
+ request: Optional[Union[service.UpdateInstanceRequest, dict]] = None,
+ *,
+ instance: Optional[resources.Instance] = None,
+ update_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Updates the parameters of a single Instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ def sample_update_instance():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminClient()
+
+ # Initialize request argument(s)
+ instance = alloydb_v1alpha.Instance()
+ instance.instance_type = "SECONDARY"
+
+ request = alloydb_v1alpha.UpdateInstanceRequest(
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.update_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.alloydb_v1alpha.types.UpdateInstanceRequest, dict]):
+ The request object. Message for updating an Instance
+ instance (google.cloud.alloydb_v1alpha.types.Instance):
+ Required. The resource being updated
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Optional. Field mask is used to specify the fields to be
+ overwritten in the Instance resource by the update. The
+ fields specified in the update_mask are relative to the
+ resource, not the full request. A field will be
+ overwritten if it is in the mask. If the user does not
+ provide a mask then all fields will be overwritten.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to.
+ It's the main unit of computing resources in AlloyDB.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([instance, update_mask])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.UpdateInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.UpdateInstanceRequest):
+ request = service.UpdateInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if instance is not None:
+ request.instance = instance
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
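+ # A minimal per-call override sketch (not generated code; `inst` is assumed
+ # to be a populated resources.Instance):
+ #
+ #   from google.api_core import retry as retries
+ #   client.update_instance(
+ #       instance=inst,
+ #       retry=retries.Retry(deadline=120.0),
+ #       timeout=60.0,
+ #   )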
+ rpc = self._transport._wrapped_methods[self._transport.update_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("instance.name", request.instance.name),)
+ ),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ resources.Instance,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_instance(
+ self,
+ request: Optional[Union[service.DeleteInstanceRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Deletes a single Instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ def sample_delete_instance():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminClient()
+
+ # Initialize request argument(s)
+ request = alloydb_v1alpha.DeleteInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.alloydb_v1alpha.types.DeleteInstanceRequest, dict]):
+ The request object. Message for deleting an Instance
+ name (str):
+ Required. The name of the resource.
+ For the required format, see the comment
+ on the Instance.name field.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
+
+ }
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.DeleteInstanceRequest.
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteInstanceRequest): + request = service.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def failover_instance( + self, + request: Optional[Union[service.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_failover_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.FailoverInstanceRequest, dict]): + The request object. Message for triggering failover on an + Instance + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.FailoverInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.FailoverInstanceRequest): + request = service.FailoverInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.failover_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def restart_instance( + self, + request: Optional[Union[service.RestartInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restart an Instance in a cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_restart_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.RestartInstanceRequest, dict]): + The request object. + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.RestartInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.RestartInstanceRequest): + request = service.RestartInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restart_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backups( + self, + request: Optional[Union[service.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_list_backups(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.ListBackupsRequest, dict]): + The request object. Message for requesting list of + Backups + parent (str): + Required. Parent value for + ListBackupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListBackupsPager: + Message for response to listing + Backups + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListBackupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListBackupsRequest): + request = service.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup( + self, + request: Optional[Union[service.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Gets details of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_get_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.GetBackupRequest, dict]): + The request object. Message for getting a Backup + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.alloydb_v1alpha.types.Backup:
+ Message describing Backup object
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.GetBackupRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.GetBackupRequest):
+ request = service.GetBackupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_backup(
+ self,
+ request: Optional[Union[service.CreateBackupRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ backup: Optional[resources.Backup] = None,
+ backup_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates a new Backup in a given project and location.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ def sample_create_backup():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminClient()
+
+ # Initialize request argument(s)
+ backup = alloydb_v1alpha.Backup()
+ backup.cluster_name = "cluster_name_value"
+
+ request = alloydb_v1alpha.CreateBackupRequest(
+ parent="parent_value",
+ backup_id="backup_id_value",
+ backup=backup,
+ )
+
+ # Make the request
+ operation = client.create_backup(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.alloydb_v1alpha.types.CreateBackupRequest, dict]):
+ The request object. Message for creating a Backup
+ parent (str):
+ Required. Value for parent.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup (google.cloud.alloydb_v1alpha.types.Backup):
+ Required. The resource being created
+ This corresponds to the ``backup`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_id (str):
+ Required. ID of the requesting
+ object.
+
+ This corresponds to the ``backup_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.alloydb_v1alpha.types.Backup`
+ Message describing Backup object
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, backup, backup_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.CreateBackupRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.CreateBackupRequest):
+ request = service.CreateBackupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if backup is not None:
+ request.backup = backup
+ if backup_id is not None:
+ request.backup_id = backup_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ resources.Backup,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def update_backup(
+ self,
+ request: Optional[Union[service.UpdateBackupRequest, dict]] = None,
+ *,
+ backup: Optional[resources.Backup] = None,
+ update_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Updates the parameters of a single Backup.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_update_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1alpha.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1alpha.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.UpdateBackupRequest, dict]): + The request object. Message for updating a Backup + backup (google.cloud.alloydb_v1alpha.types.Backup): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1alpha.types.Backup` + Message describing Backup object + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateBackupRequest): + request = service.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Send the request. 
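+ # At this point `metadata` also carries the routing header assembled above;
+ # on the wire it looks roughly like the following (illustrative value only):
+ #   ("x-goog-request-params", "backup.name=projects/p/locations/l/backups/b")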
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup( + self, + request: Optional[Union[service.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_delete_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.DeleteBackupRequest, dict]): + The request object. Message for deleting a Backup + name (str): + Required. Name of the resource. For + the required format, see the comment on + the Backup.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteBackupRequest): + request = service.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
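+ # Hedged usage sketch (hypothetical resource name): the flattened form
+ #
+ #   operation = client.delete_backup(
+ #       name="projects/my-project/locations/us-central1/backups/my-backup"
+ #   )
+ #   operation.result()  # blocks until the long-running delete completes
+ #
+ # is equivalent to passing a DeleteBackupRequest carrying the same name.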
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ empty_pb2.Empty,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_supported_database_flags(
+ self,
+ request: Optional[
+ Union[service.ListSupportedDatabaseFlagsRequest, dict]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListSupportedDatabaseFlagsPager:
+ r"""Lists SupportedDatabaseFlags for a given project and
+ location.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import alloydb_v1alpha
+
+ def sample_list_supported_database_flags():
+ # Create a client
+ client = alloydb_v1alpha.AlloyDBAdminClient()
+
+ # Initialize request argument(s)
+ request = alloydb_v1alpha.ListSupportedDatabaseFlagsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_supported_database_flags(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsRequest, dict]):
+ The request object. Message for listing the information
+ about the supported Database flags.
+ parent (str):
+ Required. The name of the parent resource. The required
+ format is:
+
+ - projects/{project}/locations/{location}
+
+ Regardless of the parent specified here, as long as it
+ contains a valid project and location, the service will
+ return a static list of supported flags resources. Note
+ that we do not yet support region-specific flags.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsPager:
+ Message for response to listing
+ SupportedDatabaseFlags.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
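+ # Illustrative consequence of the check below (hypothetical value for `p`):
+ #
+ #   client.list_supported_database_flags(request={"parent": p}, parent=p)
+ #
+ # raises ValueError rather than silently preferring one calling style.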
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListSupportedDatabaseFlagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListSupportedDatabaseFlagsRequest): + request = service.ListSupportedDatabaseFlagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_supported_database_flags + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSupportedDatabaseFlagsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def generate_client_certificate( + self, + request: Optional[Union[service.GenerateClientCertificateRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GenerateClientCertificateResponse: + r"""Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = client.generate_client_certificate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.GenerateClientCertificateRequest, dict]): + The request object. Message for requests to generate a + client certificate signed by the Cluster + CA. + parent (str): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location}/clusters/{cluster} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.GenerateClientCertificateResponse: + Message returned by a + GenerateClientCertificate operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GenerateClientCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GenerateClientCertificateRequest): + request = service.GenerateClientCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.generate_client_certificate + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_connection_info( + self, + request: Optional[Union[service.GetConnectionInfoRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConnectionInfo: + r"""Get instance metadata used for a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_get_connection_info(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_connection_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.GetConnectionInfoRequest, dict]): + The request object. Request message for + GetConnectionInfo. + parent (str): + Required. The name of the parent + resource. The required format is: + projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.ConnectionInfo: + ConnectionInfo singleton resource. + https://google.aip.dev/156 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetConnectionInfoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetConnectionInfoRequest): + request = service.GetConnectionInfoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AlloyDBAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
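+        # Note: unlike the service RPCs above, the operations/locations mixin
+        # methods below are wrapped on every call, with no default retry or
+        # timeout, rather than being precomputed in
+        # ``self._transport._wrapped_methods``.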
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
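+        # Because the wrapper below sets no default retry, transient failures
+        # are not retried unless the caller passes ``retry`` explicitly.
+        # Illustrative call (hypothetical operation name):
+        #   client.delete_operation({"name": "projects/p/locations/l/operations/op-123"})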
+ rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
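+        # ``to_grpc_metadata`` renders the resource name into the
+        # ``x-goog-request-params`` header, which the service uses for
+        # request routing.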
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AlloyDBAdminClient",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/pagers.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/pagers.py new file mode 100644 index 000000000000..9fea667865fb --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/pagers.py @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.alloydb_v1alpha.types import resources, service + + +class ListClustersPager: + """A pager for iterating through ``list_clusters`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListClustersResponse], + request: service.ListClustersRequest, + response: service.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListClustersRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Cluster]: + for page in self.pages: + yield from page.clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListClustersAsyncPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListClustersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListClustersResponse]], + request: service.ListClustersRequest, + response: service.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListClustersRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Cluster]: + async def async_generator(): + async for page in self.pages: + for response in page.clusters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListInstancesResponse], + request: service.ListInstancesRequest, + response: service.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListInstancesRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListInstancesResponse]], + request: service.ListInstancesRequest, + response: service.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListInstancesRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListBackupsResponse], + request: service.ListBackupsRequest, + response: service.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListBackupsRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListBackupsResponse]], + request: service.ListBackupsRequest, + response: service.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListBackupsRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSupportedDatabaseFlagsPager: + """A pager for iterating through ``list_supported_database_flags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``supported_database_flags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSupportedDatabaseFlags`` requests and continue to iterate + through the ``supported_database_flags`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListSupportedDatabaseFlagsResponse], + request: service.ListSupportedDatabaseFlagsRequest, + response: service.ListSupportedDatabaseFlagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListSupportedDatabaseFlagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSupportedDatabaseFlagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.SupportedDatabaseFlag]: + for page in self.pages: + yield from page.supported_database_flags + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSupportedDatabaseFlagsAsyncPager: + """A pager for iterating through ``list_supported_database_flags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``supported_database_flags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSupportedDatabaseFlags`` requests and continue to iterate + through the ``supported_database_flags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListSupportedDatabaseFlagsResponse]], + request: service.ListSupportedDatabaseFlagsRequest, + response: service.ListSupportedDatabaseFlagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListSupportedDatabaseFlagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListSupportedDatabaseFlagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.SupportedDatabaseFlag]: + async def async_generator(): + async for page in self.pages: + for response in page.supported_database_flags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/__init__.py new file mode 100644 index 000000000000..1e347e0f52b8 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AlloyDBAdminTransport +from .grpc import AlloyDBAdminGrpcTransport +from .grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport +from .rest import AlloyDBAdminRestInterceptor, AlloyDBAdminRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AlloyDBAdminTransport]] +_transport_registry["grpc"] = AlloyDBAdminGrpcTransport +_transport_registry["grpc_asyncio"] = AlloyDBAdminGrpcAsyncIOTransport +_transport_registry["rest"] = AlloyDBAdminRestTransport + +__all__ = ( + "AlloyDBAdminTransport", + "AlloyDBAdminGrpcTransport", + "AlloyDBAdminGrpcAsyncIOTransport", + "AlloyDBAdminRestTransport", + "AlloyDBAdminRestInterceptor", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py new file mode 100644 index 000000000000..68b79c60aa5e --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py @@ -0,0 +1,640 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1alpha import gapic_version as package_version +from google.cloud.alloydb_v1alpha.types import resources, service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AlloyDBAdminTransport(abc.ABC): + """Abstract transport class for AlloyDBAdmin.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "alloydb.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
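+        # Credential resolution (sketch): an explicit ``credentials`` object
+        # wins; otherwise ``credentials_file`` is loaded via
+        # ``google.auth.load_credentials_from_file``; otherwise Application
+        # Default Credentials are obtained from ``google.auth.default()``.
+        # Supplying both explicit forms at once is an error, enforced below.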
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_clusters: gapic_v1.method.wrap_method( + self.list_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method.wrap_method( + self.get_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method.wrap_method( + self.create_cluster, + default_timeout=None, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method.wrap_method( + self.update_cluster, + default_timeout=None, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method.wrap_method( + self.delete_cluster, + default_timeout=None, + client_info=client_info, + ), + self.promote_cluster: gapic_v1.method.wrap_method( + self.promote_cluster, + default_timeout=None, + client_info=client_info, + ), + self.restore_cluster: gapic_v1.method.wrap_method( + self.restore_cluster, + default_timeout=None, + client_info=client_info, + ), + self.create_secondary_cluster: gapic_v1.method.wrap_method( + self.create_secondary_cluster, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=None, + client_info=client_info, + ), + self.create_secondary_instance: gapic_v1.method.wrap_method( + 
self.create_secondary_instance, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_instances: gapic_v1.method.wrap_method( + self.batch_create_instances, + default_timeout=None, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_timeout=None, + client_info=client_info, + ), + self.failover_instance: gapic_v1.method.wrap_method( + self.failover_instance, + default_timeout=None, + client_info=client_info, + ), + self.restart_instance: gapic_v1.method.wrap_method( + self.restart_instance, + default_timeout=None, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method.wrap_method( + self.create_backup, + default_timeout=None, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.list_supported_database_flags: gapic_v1.method.wrap_method( + self.list_supported_database_flags, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.generate_client_certificate: gapic_v1.method.wrap_method( + self.generate_client_certificate, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection_info: gapic_v1.method.wrap_method( + self.get_connection_info, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_clusters( + self, + ) -> Callable[ + [service.ListClustersRequest], + Union[service.ListClustersResponse, Awaitable[service.ListClustersResponse]], + ]: + raise NotImplementedError() + + @property + def get_cluster( + self, + ) -> Callable[ + [service.GetClusterRequest], + Union[resources.Cluster, Awaitable[resources.Cluster]], + ]: + raise NotImplementedError() + + @property + def create_cluster( + self, + ) -> Callable[ + [service.CreateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_cluster( + self, + ) -> Callable[ + [service.UpdateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cluster( + self, + ) -> Callable[ + [service.DeleteClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def promote_cluster( + self, + ) -> Callable[ + [service.PromoteClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_cluster( + self, + ) -> Callable[ + [service.RestoreClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_secondary_cluster( + self, + ) -> Callable[ + [service.CreateSecondaryClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], + Union[service.ListInstancesResponse, Awaitable[service.ListInstancesResponse]], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> Callable[ + [service.GetInstanceRequest], + Union[resources.Instance, Awaitable[resources.Instance]], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> Callable[ + [service.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_secondary_instance( + self, + ) -> Callable[ + [service.CreateSecondaryInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def batch_create_instances( + self, + ) -> Callable[ + [service.BatchCreateInstancesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_instance( + self, + ) -> Callable[ + [service.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> Callable[ + [service.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def failover_instance( + self, + ) -> Callable[ + [service.FailoverInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restart_instance( + self, + ) -> Callable[ + 
[service.RestartInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [service.ListBackupsRequest], + Union[service.ListBackupsResponse, Awaitable[service.ListBackupsResponse]], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [service.GetBackupRequest], Union[resources.Backup, Awaitable[resources.Backup]] + ]: + raise NotImplementedError() + + @property + def create_backup( + self, + ) -> Callable[ + [service.CreateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [service.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [service.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + Union[ + service.ListSupportedDatabaseFlagsResponse, + Awaitable[service.ListSupportedDatabaseFlagsResponse], + ], + ]: + raise NotImplementedError() + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + Union[ + service.GenerateClientCertificateResponse, + Awaitable[service.GenerateClientCertificateResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_connection_info( + self, + ) -> Callable[ + [service.GetConnectionInfoRequest], + Union[resources.ConnectionInfo, Awaitable[resources.ConnectionInfo]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AlloyDBAdminTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py new file mode 100644 index 000000000000..3f0347a25642 --- /dev/null +++ 
b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py
@@ -0,0 +1,1049 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.alloydb_v1alpha.types import resources, service
+
+from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
+
+
+class AlloyDBAdminGrpcTransport(AlloyDBAdminTransport):
+    """gRPC backend transport for AlloyDBAdmin.
+
+    Service describing handlers for resources
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated.
The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def list_clusters(
+        self,
+    ) -> Callable[[service.ListClustersRequest], service.ListClustersResponse]:
+        r"""Return a callable for the list clusters method over gRPC.
+
+        Lists Clusters in a given project and location.
+
+        Returns:
+            Callable[[~.ListClustersRequest],
+                    ~.ListClustersResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
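The constructor and ``create_channel`` above give two construction paths: let the transport build a channel from ambient credentials, or pass a ready-made ``channel``, in which case the credential arguments are ignored. A minimal sketch of both, assuming Application Default Credentials are configured in the environment:

```python
from google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.grpc import (
    AlloyDBAdminGrpcTransport,
)

# Path 1: the transport creates its own channel from ambient credentials.
transport = AlloyDBAdminGrpcTransport()

# Path 2: build the channel explicitly and hand it in; per the constructor
# docstring, credentials passed alongside an explicit channel are ignored.
channel = AlloyDBAdminGrpcTransport.create_channel("alloydb.googleapis.com")
transport_with_channel = AlloyDBAdminGrpcTransport(channel=channel)
```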
+ if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListClusters", + request_serializer=service.ListClustersRequest.serialize, + response_deserializer=service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster(self) -> Callable[[service.GetClusterRequest], resources.Cluster]: + r"""Return a callable for the get cluster method over gRPC. + + Gets details of a single Cluster. + + Returns: + Callable[[~.GetClusterRequest], + ~.Cluster]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetCluster", + request_serializer=service.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new Cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateCluster", + request_serializer=service.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the parameters of a single Cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpdateCluster", + request_serializer=service.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single Cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/DeleteCluster", + request_serializer=service.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def promote_cluster( + self, + ) -> Callable[[service.PromoteClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the promote cluster method over gRPC. + + Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + Returns: + Callable[[~.PromoteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "promote_cluster" not in self._stubs: + self._stubs["promote_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/PromoteCluster", + request_serializer=service.PromoteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["promote_cluster"] + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the restore cluster method over gRPC. + + Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + Returns: + Callable[[~.RestoreClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_cluster" not in self._stubs: + self._stubs["restore_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/RestoreCluster", + request_serializer=service.RestoreClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_cluster"] + + @property + def create_secondary_cluster( + self, + ) -> Callable[[service.CreateSecondaryClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the create secondary cluster method over gRPC. + + Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + Returns: + Callable[[~.CreateSecondaryClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
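Every RPC property in this file repeats the same lazy-initialization idiom: build the gRPC callable on first access, memoize it in ``self._stubs``, and return the cached callable thereafter. Distilled into a standalone sketch (the ``_EchoTransport`` class and ``/example.Echo/Echo`` method are hypothetical, not part of this library):

```python
import grpc


class _EchoTransport:
    """Hypothetical minimal transport illustrating the stub-caching idiom."""

    def __init__(self, channel: grpc.Channel) -> None:
        self._channel = channel
        self._stubs: dict = {}

    @property
    def echo(self):
        # Create the multicallable only on first access; serializers are
        # omitted here, so requests and responses would be raw bytes.
        if "echo" not in self._stubs:
            self._stubs["echo"] = self._channel.unary_unary("/example.Echo/Echo")
        return self._stubs["echo"]
```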
+ if "create_secondary_cluster" not in self._stubs: + self._stubs["create_secondary_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateSecondaryCluster", + request_serializer=service.CreateSecondaryClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_secondary_cluster"] + + @property + def list_instances( + self, + ) -> Callable[[service.ListInstancesRequest], service.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], resources.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetInstance", + request_serializer=service.GetInstanceRequest.serialize, + response_deserializer=resources.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateInstance", + request_serializer=service.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def create_secondary_instance( + self, + ) -> Callable[[service.CreateSecondaryInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create secondary instance method over gRPC. + + Creates a new SECONDARY Instance in a given project + and location. 
+ + Returns: + Callable[[~.CreateSecondaryInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_secondary_instance" not in self._stubs: + self._stubs["create_secondary_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateSecondaryInstance", + request_serializer=service.CreateSecondaryInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_secondary_instance"] + + @property + def batch_create_instances( + self, + ) -> Callable[[service.BatchCreateInstancesRequest], operations_pb2.Operation]: + r"""Return a callable for the batch create instances method over gRPC. + + Creates new instances under the given project, + location and cluster. There can be only one primary + instance in a cluster. If the primary instance exists in + the cluster as well as this request, then API will throw + an error. + The primary instance should exist before any read pool + instance is created. If the primary instance is a part + of the request payload, then the API will take care of + creating instances in the correct order. This method is + here to support Google-internal use cases, and is not + meant for external customers to consume. Please do not + start relying on it; its behavior is subject to change + without notice. + + Returns: + Callable[[~.BatchCreateInstancesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_instances" not in self._stubs: + self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/BatchCreateInstances", + request_serializer=service.BatchCreateInstancesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_instances"] + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single Instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpdateInstance", + request_serializer=service.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. 
+ + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/DeleteInstance", + request_serializer=service.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def failover_instance( + self, + ) -> Callable[[service.FailoverInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the failover instance method over gRPC. + + Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + Returns: + Callable[[~.FailoverInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/FailoverInstance", + request_serializer=service.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["failover_instance"] + + @property + def restart_instance( + self, + ) -> Callable[[service.RestartInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the restart instance method over gRPC. + + Restart an Instance in a cluster. + Imperative only. + + Returns: + Callable[[~.RestartInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restart_instance" not in self._stubs: + self._stubs["restart_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/RestartInstance", + request_serializer=service.RestartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restart_instance"] + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], service.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
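The cluster and instance mutations above all return a ``google.longrunning`` ``Operation`` rather than the final resource. A hedged sketch of checking one such operation through the transport's ``operations_client``; ``transport`` is an existing ``AlloyDBAdminGrpcTransport`` and ``op_name`` is a placeholder for the ``name`` field of an Operation returned by a call such as ``create_cluster``:

```python
# Fetch the latest state of a long-running operation. The GAPIC client
# normally wraps this in a future-style helper that polls for you.
operation = transport.operations_client.get_operation(op_name)
if operation.done:
    # A finished Operation carries exactly one of `error` or `response`.
    if operation.HasField("error"):
        print("failed:", operation.error.message)
    else:
        print("succeeded, response type:", operation.response.type_url)
```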
+ if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListBackups", + request_serializer=service.ListBackupsRequest.serialize, + response_deserializer=service.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup(self) -> Callable[[service.GetBackupRequest], resources.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single Backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetBackup", + request_serializer=service.GetBackupRequest.serialize, + response_deserializer=resources.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup method over gRPC. + + Creates a new Backup in a given project and location. + + Returns: + Callable[[~.CreateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateBackup", + request_serializer=service.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Updates the parameters of a single Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpdateBackup", + request_serializer=service.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/DeleteBackup", + request_serializer=service.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + service.ListSupportedDatabaseFlagsResponse, + ]: + r"""Return a callable for the list supported database flags method over gRPC. + + Lists SupportedDatabaseFlags for a given project and + location. + + Returns: + Callable[[~.ListSupportedDatabaseFlagsRequest], + ~.ListSupportedDatabaseFlagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_supported_database_flags" not in self._stubs: + self._stubs[ + "list_supported_database_flags" + ] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListSupportedDatabaseFlags", + request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize, + response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize, + ) + return self._stubs["list_supported_database_flags"] + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + service.GenerateClientCertificateResponse, + ]: + r"""Return a callable for the generate client certificate method over gRPC. + + Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + Returns: + Callable[[~.GenerateClientCertificateRequest], + ~.GenerateClientCertificateResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_client_certificate" not in self._stubs: + self._stubs["generate_client_certificate"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GenerateClientCertificate", + request_serializer=service.GenerateClientCertificateRequest.serialize, + response_deserializer=service.GenerateClientCertificateResponse.deserialize, + ) + return self._stubs["generate_client_certificate"] + + @property + def get_connection_info( + self, + ) -> Callable[[service.GetConnectionInfoRequest], resources.ConnectionInfo]: + r"""Return a callable for the get connection info method over gRPC. + + Get instance metadata used for a connection. + + Returns: + Callable[[~.GetConnectionInfoRequest], + ~.ConnectionInfo]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_connection_info" not in self._stubs: + self._stubs["get_connection_info"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetConnectionInfo", + request_serializer=service.GetConnectionInfoRequest.serialize, + response_deserializer=resources.ConnectionInfo.deserialize, + ) + return self._stubs["get_connection_info"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("AlloyDBAdminGrpcTransport",)
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..9a6ec4d01bb7
--- /dev/null
+++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py
@@ -0,0 +1,1070 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.alloydb_v1alpha.types import resources, service
+
+from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
+from .grpc import AlloyDBAdminGrpcTransport
+
+
+class AlloyDBAdminGrpcAsyncIOTransport(AlloyDBAdminTransport):
+    """gRPC AsyncIO backend transport for AlloyDBAdmin.
+
+    Service describing handlers for resources
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_clusters( + self, + ) -> Callable[ + [service.ListClustersRequest], Awaitable[service.ListClustersResponse] + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists Clusters in a given project and location. + + Returns: + Callable[[~.ListClustersRequest], + Awaitable[~.ListClustersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListClusters", + request_serializer=service.ListClustersRequest.serialize, + response_deserializer=service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[[service.GetClusterRequest], Awaitable[resources.Cluster]]: + r"""Return a callable for the get cluster method over gRPC. + + Gets details of a single Cluster. + + Returns: + Callable[[~.GetClusterRequest], + Awaitable[~.Cluster]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetCluster", + request_serializer=service.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new Cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateCluster", + request_serializer=service.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the parameters of a single Cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpdateCluster", + request_serializer=service.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single Cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/DeleteCluster", + request_serializer=service.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def promote_cluster( + self, + ) -> Callable[[service.PromoteClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the promote cluster method over gRPC. + + Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + Returns: + Callable[[~.PromoteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "promote_cluster" not in self._stubs: + self._stubs["promote_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/PromoteCluster", + request_serializer=service.PromoteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["promote_cluster"] + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restore cluster method over gRPC. 
+ + Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + Returns: + Callable[[~.RestoreClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_cluster" not in self._stubs: + self._stubs["restore_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/RestoreCluster", + request_serializer=service.RestoreClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_cluster"] + + @property + def create_secondary_cluster( + self, + ) -> Callable[ + [service.CreateSecondaryClusterRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create secondary cluster method over gRPC. + + Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + Returns: + Callable[[~.CreateSecondaryClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_secondary_cluster" not in self._stubs: + self._stubs["create_secondary_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateSecondaryCluster", + request_serializer=service.CreateSecondaryClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_secondary_cluster"] + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], Awaitable[service.ListInstancesResponse] + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], Awaitable[resources.Instance]]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
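The AsyncIO transport mirrors the synchronous file property for property; the difference is that its callables return Awaitables and must be awaited. A minimal usage sketch, assuming Application Default Credentials (``my-project`` is a placeholder; ``-`` requests clusters across all locations):

```python
import asyncio

from google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.grpc_asyncio import (
    AlloyDBAdminGrpcAsyncIOTransport,
)
from google.cloud.alloydb_v1alpha.types import service


async def main() -> None:
    transport = AlloyDBAdminGrpcAsyncIOTransport()
    request = service.ListClustersRequest(parent="projects/my-project/locations/-")
    # Same property name as the sync transport, but awaited here.
    response = await transport.list_clusters(request)
    for cluster in response.clusters:
        print(cluster.name)


asyncio.run(main())
```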
+        if "get_instance" not in self._stubs:
+            self._stubs["get_instance"] = self.grpc_channel.unary_unary(
+                "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetInstance",
+                request_serializer=service.GetInstanceRequest.serialize,
+                response_deserializer=resources.Instance.deserialize,
+            )
+        return self._stubs["get_instance"]
+
+    @property
+    def create_instance(
+        self,
+    ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the create instance method over gRPC.
+
+        Creates a new Instance in a given project and
+        location.
+
+        Returns:
+            Callable[[~.CreateInstanceRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_instance" not in self._stubs:
+            self._stubs["create_instance"] = self.grpc_channel.unary_unary(
+                "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateInstance",
+                request_serializer=service.CreateInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["create_instance"]
+
+    @property
+    def create_secondary_instance(
+        self,
+    ) -> Callable[
+        [service.CreateSecondaryInstanceRequest], Awaitable[operations_pb2.Operation]
+    ]:
+        r"""Return a callable for the create secondary instance method over gRPC.
+
+        Creates a new SECONDARY Instance in a given project
+        and location.
+
+        Returns:
+            Callable[[~.CreateSecondaryInstanceRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_secondary_instance" not in self._stubs:
+            self._stubs["create_secondary_instance"] = self.grpc_channel.unary_unary(
+                "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateSecondaryInstance",
+                request_serializer=service.CreateSecondaryInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["create_secondary_instance"]
+
+    @property
+    def batch_create_instances(
+        self,
+    ) -> Callable[
+        [service.BatchCreateInstancesRequest], Awaitable[operations_pb2.Operation]
+    ]:
+        r"""Return a callable for the batch create instances method over gRPC.
+
+        Creates new instances under the given project,
+        location and cluster. There can be only one primary
+        instance in a cluster. If the primary instance exists in
+        the cluster as well as this request, then the API will
+        throw an error.
+        The primary instance should exist before any read pool
+        instance is created. If the primary instance is a part
+        of the request payload, then the API will take care of
+        creating instances in the correct order. This method is
+        here to support Google-internal use cases, and is not
+        meant for external customers to consume. Please do not
+        start relying on it; its behavior is subject to change
+        without notice.
+
+        Returns:
+            Callable[[~.BatchCreateInstancesRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
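+        # (A sketch of the call shape, with hypothetical values: awaiting this
+        # callable with a service.BatchCreateInstancesRequest, whose `requests`
+        # field groups the per-instance create requests, resolves to a
+        # long-running operations_pb2.Operation.)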
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_instances" not in self._stubs: + self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/BatchCreateInstances", + request_serializer=service.BatchCreateInstancesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_instances"] + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single Instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpdateInstance", + request_serializer=service.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/DeleteInstance", + request_serializer=service.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def failover_instance( + self, + ) -> Callable[ + [service.FailoverInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the failover instance method over gRPC. + + Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + Returns: + Callable[[~.FailoverInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
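+        # A minimal usage sketch, assuming an initialized transport and a
+        # hypothetical instance name; the returned Operation still has to be
+        # polled (the async client layer normally wraps this for you):
+        #
+        #   request = service.FailoverInstanceRequest(
+        #       name="projects/my-project/locations/us-central1/clusters/my-cluster/instances/my-instance"
+        #   )
+        #   operation = await transport.failover_instance(request)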
+ if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/FailoverInstance", + request_serializer=service.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["failover_instance"] + + @property + def restart_instance( + self, + ) -> Callable[ + [service.RestartInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restart instance method over gRPC. + + Restart an Instance in a cluster. + Imperative only. + + Returns: + Callable[[~.RestartInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restart_instance" not in self._stubs: + self._stubs["restart_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/RestartInstance", + request_serializer=service.RestartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restart_instance"] + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], Awaitable[service.ListBackupsResponse]]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListBackups", + request_serializer=service.ListBackupsRequest.serialize, + response_deserializer=service.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[service.GetBackupRequest], Awaitable[resources.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single Backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetBackup", + request_serializer=service.GetBackupRequest.serialize, + response_deserializer=resources.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create backup method over gRPC. + + Creates a new Backup in a given project and location. + + Returns: + Callable[[~.CreateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateBackup", + request_serializer=service.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update backup method over gRPC. + + Updates the parameters of a single Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpdateBackup", + request_serializer=service.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/DeleteBackup", + request_serializer=service.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + Awaitable[service.ListSupportedDatabaseFlagsResponse], + ]: + r"""Return a callable for the list supported database flags method over gRPC. + + Lists SupportedDatabaseFlags for a given project and + location. + + Returns: + Callable[[~.ListSupportedDatabaseFlagsRequest], + Awaitable[~.ListSupportedDatabaseFlagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
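+        # Transport-level callables return raw response messages rather than
+        # the client-layer pagers, so paging is manual at this level. A sketch
+        # with a hypothetical parent path:
+        #
+        #   request = service.ListSupportedDatabaseFlagsRequest(
+        #       parent="projects/my-project/locations/us-central1"
+        #   )
+        #   response = await transport.list_supported_database_flags(request)
+        #   # issue follow-up requests with response.next_page_token as needed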
+ if "list_supported_database_flags" not in self._stubs: + self._stubs[ + "list_supported_database_flags" + ] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListSupportedDatabaseFlags", + request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize, + response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize, + ) + return self._stubs["list_supported_database_flags"] + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + Awaitable[service.GenerateClientCertificateResponse], + ]: + r"""Return a callable for the generate client certificate method over gRPC. + + Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + Returns: + Callable[[~.GenerateClientCertificateRequest], + Awaitable[~.GenerateClientCertificateResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_client_certificate" not in self._stubs: + self._stubs["generate_client_certificate"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GenerateClientCertificate", + request_serializer=service.GenerateClientCertificateRequest.serialize, + response_deserializer=service.GenerateClientCertificateResponse.deserialize, + ) + return self._stubs["generate_client_certificate"] + + @property + def get_connection_info( + self, + ) -> Callable[ + [service.GetConnectionInfoRequest], Awaitable[resources.ConnectionInfo] + ]: + r"""Return a callable for the get connection info method over gRPC. + + Get instance metadata used for a connection. + + Returns: + Callable[[~.GetConnectionInfoRequest], + Awaitable[~.ConnectionInfo]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_connection_info" not in self._stubs: + self._stubs["get_connection_info"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/GetConnectionInfo", + request_serializer=service.GetConnectionInfoRequest.serialize, + response_deserializer=resources.ConnectionInfo.deserialize, + ) + return self._stubs["get_connection_info"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
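+        # These google.longrunning mixin callables take fully qualified
+        # operation names produced by the LRO methods above; a sketch with a
+        # hypothetical name:
+        #
+        #   await transport.delete_operation(
+        #       operations_pb2.DeleteOperationRequest(
+        #           name="projects/my-project/locations/us-central1/operations/operation-123"
+        #       )
+        #   )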
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
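+        # (For example, awaiting this callable with a hypothetical
+        # locations_pb2.GetLocationRequest(name="projects/my-project/locations/us-central1")
+        # resolves to a locations_pb2.Location message.)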
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("AlloyDBAdminGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py new file mode 100644 index 000000000000..933c03bd4f6a --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py @@ -0,0 +1,4075 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.alloydb_v1alpha.types import resources, service + +from .base import AlloyDBAdminTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AlloyDBAdminRestInterceptor: + """Interceptor for AlloyDBAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AlloyDBAdminRestTransport. + + .. 
code-block:: python + class MyCustomAlloyDBAdminInterceptor(AlloyDBAdminRestInterceptor): + def pre_batch_create_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_secondary_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_secondary_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_secondary_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_secondary_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_failover_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_failover_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_client_certificate(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_client_certificate(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection_info(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_supported_database_flags(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_supported_database_flags(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_promote_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_promote_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restart_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restart_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AlloyDBAdminRestTransport(interceptor=MyCustomAlloyDBAdminInterceptor()) + client = AlloyDBAdminClient(transport=transport) + + + """ + + def pre_batch_create_instances( + self, + request: service.BatchCreateInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.BatchCreateInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_create_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_batch_create_instances( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_create_instances + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
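+
+        For example, a subclass might record the operation name before
+        handing it back (a sketch only; ``print`` stands in for real
+        logging):
+
+        .. code-block:: python
+
+            def post_batch_create_instances(self, response):
+                print(f"LRO started: {response.name}")
+                return response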
+ """ + return response + + def pre_create_backup( + self, request: service.CreateBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_cluster( + self, request: service.CreateClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_instance( + self, + request: service.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_secondary_cluster( + self, + request: service.CreateSecondaryClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateSecondaryClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_secondary_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_secondary_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_secondary_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_secondary_instance( + self, + request: service.CreateSecondaryInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateSecondaryInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_secondary_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
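+
+        A pre-hook can also return augmented metadata; a sketch with a
+        hypothetical header:
+
+        .. code-block:: python
+
+            def pre_create_secondary_instance(self, request, metadata):
+                extra = (("x-customer-audit-id", "example"),)
+                return request, tuple(metadata) + extra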
+ """ + return request, metadata + + def post_create_secondary_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_secondary_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_backup( + self, request: service.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_cluster( + self, request: service.DeleteClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_instance( + self, + request: service.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_failover_instance( + self, + request: service.FailoverInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for failover_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_failover_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for failover_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_generate_client_certificate( + self, + request: service.GenerateClientCertificateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GenerateClientCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_client_certificate + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_generate_client_certificate( + self, response: service.GenerateClientCertificateResponse + ) -> service.GenerateClientCertificateResponse: + """Post-rpc interceptor for generate_client_certificate + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_backup( + self, request: service.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_backup(self, response: resources.Backup) -> resources.Backup: + """Post-rpc interceptor for get_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_cluster( + self, request: service.GetClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: + """Post-rpc interceptor for get_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_connection_info( + self, + request: service.GetConnectionInfoRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetConnectionInfoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_connection_info( + self, response: resources.ConnectionInfo + ) -> resources.ConnectionInfo: + """Post-rpc interceptor for get_connection_info + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_instance( + self, request: service.GetInstanceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_instance(self, response: resources.Instance) -> resources.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_list_backups( + self, request: service.ListBackupsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_backups( + self, response: service.ListBackupsResponse + ) -> service.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_clusters( + self, request: service.ListClustersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_clusters( + self, response: service.ListClustersResponse + ) -> service.ListClustersResponse: + """Post-rpc interceptor for list_clusters + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_instances( + self, request: service.ListInstancesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_instances( + self, response: service.ListInstancesResponse + ) -> service.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_supported_database_flags( + self, + request: service.ListSupportedDatabaseFlagsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListSupportedDatabaseFlagsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_supported_database_flags + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_supported_database_flags( + self, response: service.ListSupportedDatabaseFlagsResponse + ) -> service.ListSupportedDatabaseFlagsResponse: + """Post-rpc interceptor for list_supported_database_flags + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_promote_cluster( + self, + request: service.PromoteClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.PromoteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for promote_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_promote_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for promote_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_restart_instance( + self, + request: service.RestartInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RestartInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restart_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_restart_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restart_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_restore_cluster( + self, + request: service.RestoreClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RestoreClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_restore_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_backup( + self, request: service.UpdateBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_cluster( + self, request: service.UpdateClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_update_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_instance( + self, + request: service.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
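+
+        Because the request is a mutable protobuf message, a subclass can
+        adjust fields in place before returning it; a sketch (assuming the
+        request carries a ``validate_only`` field):
+
+        .. code-block:: python
+
+            def pre_update_instance(self, request, metadata):
+                request.validate_only = True  # turn every update into a dry run
+                return request, metadata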
+ """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AlloyDBAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AlloyDBAdminRestInterceptor + + +class AlloyDBAdminRestTransport(AlloyDBAdminTransport): + """REST backend transport for AlloyDBAdmin. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "alloydb.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AlloyDBAdminRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AlloyDBAdminRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1alpha/{name=projects/*/locations/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1alpha",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _BatchCreateInstances(AlloyDBAdminRestStub):
+        def __hash__(self):
+            return hash("BatchCreateInstances")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.BatchCreateInstancesRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the batch create instances method over HTTP.
+
+            Args:
+                request (~.service.BatchCreateInstancesRequest):
+                    The request object. Message for creating a batch of
+                instances under the specified cluster.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances:batchCreate", + "body": "requests", + }, + ] + request, metadata = self._interceptor.pre_batch_create_instances( + request, metadata + ) + pb_request = service.BatchCreateInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_instances(resp) + return resp + + class _CreateBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup method over HTTP. + + Args: + request (~.service.CreateBackupRequest): + The request object. Message for creating a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
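+
+ Note: an illustrative sketch (not part of the generated API
+ surface) of the required-field handling defined on this stub.
+ With ``__REQUIRED_FIELDS_DEFAULT_VALUES = {"backupId": ""}``,
+ any required query parameter the caller left unset is re-added
+ with its proto default so it is always present on the wire::
+
+ >>> AlloyDBAdminRestTransport._CreateBackup._get_unset_required_fields(
+ ... {"parent": "projects/p/locations/l"}
+ ... )
+ {'backupId': ''}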
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/backups", + "body": "backup", + }, + ] + request, metadata = self._interceptor.pre_create_backup(request, metadata) + pb_request = service.CreateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup(resp) + return resp + + class _CreateCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "clusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cluster method over HTTP. + + Args: + request (~.service.CreateClusterRequest): + The request object. Message for creating a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
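+
+ Example: a minimal sketch of reaching this handler through
+ the public client (hypothetical project and network; assumes
+ application-default credentials are available)::
+
+ from google.cloud import alloydb_v1alpha
+
+ client = alloydb_v1alpha.AlloyDBAdminClient(transport="rest")
+ operation = client.create_cluster(
+ parent="projects/my-project/locations/us-central1",
+ cluster=alloydb_v1alpha.Cluster(
+ network="projects/my-project/global/networks/default"
+ ),
+ cluster_id="my-cluster",
+ )
+ cluster = operation.result() # blocks until the LRO finishes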
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1alpha/{parent=projects/*/locations/*}/clusters",
+ "body": "cluster",
+ },
+ ]
+ request, metadata = self._interceptor.pre_create_cluster(request, metadata)
+ pb_request = service.CreateClusterRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = operations_pb2.Operation()
+ json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_create_cluster(resp)
+ return resp
+
+ class _CreateInstance(AlloyDBAdminRestStub):
+ def __hash__(self):
+ return hash("CreateInstance")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ "instanceId": "",
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: service.CreateInstanceRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Call the create instance method over HTTP.
+
+ Args:
+ request (~.service.CreateInstanceRequest):
+ The request object. Message for creating an Instance
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+ This resource represents a
+ long-running operation that is the
+ result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = service.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _CreateSecondaryCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateSecondaryCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "clusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateSecondaryClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create secondary cluster method over HTTP. + + Args: + request (~.service.CreateSecondaryClusterRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/clusters:createsecondary", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_create_secondary_cluster( + request, metadata + ) + pb_request = service.CreateSecondaryClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_secondary_cluster(resp) + return resp + + class _CreateSecondaryInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateSecondaryInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateSecondaryInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create secondary instance method over HTTP. + + Args: + request (~.service.CreateSecondaryInstanceRequest): + The request object. Message for creating a Secondary + Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances:createsecondary", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_secondary_instance( + request, metadata + ) + pb_request = service.CreateSecondaryInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_secondary_instance(resp) + return resp + + class _DeleteBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("DeleteBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.service.DeleteBackupRequest): + The request object. Message for deleting a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = service.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup(resp) + return resp + + class _DeleteCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("DeleteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cluster method over HTTP. + + Args: + request (~.service.DeleteClusterRequest): + The request object. Message for deleting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "delete",
+ "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*}",
+ },
+ ]
+ request, metadata = self._interceptor.pre_delete_cluster(request, metadata)
+ pb_request = service.DeleteClusterRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = operations_pb2.Operation()
+ json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_delete_cluster(resp)
+ return resp
+
+ class _DeleteInstance(AlloyDBAdminRestStub):
+ def __hash__(self):
+ return hash("DeleteInstance")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: service.DeleteInstanceRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Call the delete instance method over HTTP.
+
+ Args:
+ request (~.service.DeleteInstanceRequest):
+ The request object. Message for deleting an Instance
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+ This resource represents a
+ long-running operation that is the
+ result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = service.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _FailoverInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("FailoverInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.FailoverInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the failover instance method over HTTP. + + Args: + request (~.service.FailoverInstanceRequest): + The request object. Message for triggering failover on an + Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
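+
+ Sketch of polling the returned operation (illustrative only;
+ ``transport`` is assumed to be an already-constructed
+ ``AlloyDBAdminRestTransport`` and the resource name is
+ hypothetical)::
+
+ op = transport.failover_instance(
+ service.FailoverInstanceRequest(
+ name="projects/p/locations/l/clusters/c/instances/i"
+ )
+ )
+ # operations_client (defined above) speaks the same v1alpha
+ # REST surface and can poll the LRO by name.
+ latest = transport.operations_client.get_operation(op.name)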
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:failover", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_failover_instance( + request, metadata + ) + pb_request = service.FailoverInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_failover_instance(resp) + return resp + + class _GenerateClientCertificate(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GenerateClientCertificate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GenerateClientCertificateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GenerateClientCertificateResponse: + r"""Call the generate client + certificate method over HTTP. + + Args: + request (~.service.GenerateClientCertificateRequest): + The request object. Message for requests to generate a + client certificate signed by the Cluster + CA. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.GenerateClientCertificateResponse: + Message returned by a + GenerateClientCertificate operation. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*}:generateClientCertificate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_client_certificate( + request, metadata + ) + pb_request = service.GenerateClientCertificateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.GenerateClientCertificateResponse() + pb_resp = service.GenerateClientCertificateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_client_certificate(resp) + return resp + + class _GetBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.service.GetBackupRequest): + The request object. Message for getting a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.resources.Backup: + Message describing Backup object + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = service.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Backup() + pb_resp = resources.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Call the get cluster method over HTTP. + + Args: + request (~.service.GetClusterRequest): + The request object. Message for getting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. 
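+
+ Sketch of the corresponding public-client call (hypothetical
+ resource name; assumes application-default credentials)::
+
+ from google.cloud import alloydb_v1alpha
+
+ client = alloydb_v1alpha.AlloyDBAdminClient(transport="rest")
+ cluster = client.get_cluster(
+ name="projects/my-project/locations/us-central1/clusters/my-cluster"
+ )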
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster(request, metadata) + pb_request = service.GetClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Cluster() + pb_resp = resources.Cluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cluster(resp) + return resp + + class _GetConnectionInfo(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetConnectionInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetConnectionInfoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConnectionInfo: + r"""Call the get connection info method over HTTP. + + Args: + request (~.service.GetConnectionInfoRequest): + The request object. Request message for + GetConnectionInfo. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.ConnectionInfo: + ConnectionInfo singleton resource. 
+ https://google.aip.dev/156
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*/instances/*}/connectionInfo",
+ },
+ ]
+ request, metadata = self._interceptor.pre_get_connection_info(
+ request, metadata
+ )
+ pb_request = service.GetConnectionInfoRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = resources.ConnectionInfo()
+ pb_resp = resources.ConnectionInfo.pb(resp)
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_get_connection_info(resp)
+ return resp
+
+ class _GetInstance(AlloyDBAdminRestStub):
+ def __hash__(self):
+ return hash("GetInstance")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: service.GetInstanceRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.Instance:
+ r"""Call the get instance method over HTTP.
+
+ Args:
+ request (~.service.GetInstanceRequest):
+ The request object. Message for getting an Instance
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.resources.Instance:
+ An Instance is a computing unit that
+ an end customer can connect to. It's the
+ main unit of computing resources in
+ AlloyDB.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = service.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Instance() + pb_resp = resources.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ListBackups(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.service.ListBackupsRequest): + The request object. Message for requesting list of + Backups + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListBackupsResponse: + Message for response to listing + Backups + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = service.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListBackupsResponse() + pb_resp = service.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListClusters(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListClustersResponse: + r"""Call the list clusters method over HTTP. + + Args: + request (~.service.ListClustersRequest): + The request object. Message for requesting list of + Clusters + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListClustersResponse: + Message for response to listing + Clusters + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/clusters", + }, + ] + request, metadata = self._interceptor.pre_list_clusters(request, metadata) + pb_request = service.ListClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListClustersResponse() + pb_resp = service.ListClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_clusters(resp) + return resp + + class _ListInstances(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.service.ListInstancesRequest): + The request object. Message for requesting list of + Instances + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListInstancesResponse: + Message for response to listing + Instances + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = service.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListInstancesResponse() + pb_resp = service.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _ListSupportedDatabaseFlags(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListSupportedDatabaseFlags") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListSupportedDatabaseFlagsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListSupportedDatabaseFlagsResponse: + r"""Call the list supported database + flags method over HTTP. + + Args: + request (~.service.ListSupportedDatabaseFlagsRequest): + The request object. Message for listing the information + about the supported Database flags. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListSupportedDatabaseFlagsResponse: + Message for response to listing + SupportedDatabaseFlags. 
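+
+ Note: this stub returns a single page; the public client
+ wraps it in a pager that fetches follow-up pages on demand.
+ A minimal sketch (hypothetical parent; assumes
+ application-default credentials)::
+
+ from google.cloud import alloydb_v1alpha
+
+ client = alloydb_v1alpha.AlloyDBAdminClient(transport="rest")
+ for flag in client.list_supported_database_flags(
+ parent="projects/my-project/locations/us-central1"
+ ):
+ print(flag.name)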
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/supportedDatabaseFlags", + }, + ] + request, metadata = self._interceptor.pre_list_supported_database_flags( + request, metadata + ) + pb_request = service.ListSupportedDatabaseFlagsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListSupportedDatabaseFlagsResponse() + pb_resp = service.ListSupportedDatabaseFlagsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_supported_database_flags(resp) + return resp + + class _PromoteCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("PromoteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.PromoteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the promote cluster method over HTTP. + + Args: + request (~.service.PromoteClusterRequest): + The request object. Message for promoting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*}:promote", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_promote_cluster(request, metadata) + pb_request = service.PromoteClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_promote_cluster(resp) + return resp + + class _RestartInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("RestartInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RestartInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restart instance method over HTTP. + + Args: + request (~.service.RestartInstanceRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:restart", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restart_instance( + request, metadata + ) + pb_request = service.RestartInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restart_instance(resp) + return resp + + class _RestoreCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("RestoreCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RestoreClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore cluster method over HTTP. + + Args: + request (~.service.RestoreClusterRequest): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*}/clusters:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_cluster(request, metadata) + pb_request = service.RestoreClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_cluster(resp) + return resp + + class _UpdateBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("UpdateBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update backup method over HTTP. + + Args: + request (~.service.UpdateBackupRequest): + The request object. Message for updating a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{backup.name=projects/*/locations/*/backups/*}", + "body": "backup", + }, + ] + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = service.UpdateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup(resp) + return resp + + class _UpdateCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("UpdateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update cluster method over HTTP. + + Args: + request (~.service.UpdateClusterRequest): + The request object. Message for updating a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{cluster.name=projects/*/locations/*/clusters/*}", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_update_cluster(request, metadata) + pb_request = service.UpdateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_cluster(resp) + return resp + + class _UpdateInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("UpdateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.service.UpdateInstanceRequest): + The request object. Message for updating a Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{instance.name=projects/*/locations/*/clusters/*/instances/*}", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = service.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + @property + def batch_create_instances( + self, + ) -> Callable[[service.BatchCreateInstancesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_secondary_cluster( + self, + ) -> Callable[[service.CreateSecondaryClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateSecondaryCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_secondary_instance( + self, + ) -> Callable[[service.CreateSecondaryInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSecondaryInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def failover_instance( + self, + ) -> Callable[[service.FailoverInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + service.GenerateClientCertificateResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateClientCertificate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup(self) -> Callable[[service.GetBackupRequest], resources.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cluster(self) -> Callable[[service.GetClusterRequest], resources.Cluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection_info( + self, + ) -> Callable[[service.GetConnectionInfoRequest], resources.ConnectionInfo]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetConnectionInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], resources.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], service.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_clusters( + self, + ) -> Callable[[service.ListClustersRequest], service.ListClustersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances( + self, + ) -> Callable[[service.ListInstancesRequest], service.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + service.ListSupportedDatabaseFlagsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSupportedDatabaseFlags(self._session, self._host, self._interceptor) # type: ignore + + @property + def promote_cluster( + self, + ) -> Callable[[service.PromoteClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PromoteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def restart_instance( + self, + ) -> Callable[[service.RestartInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestartInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(AlloyDBAdminRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(AlloyDBAdminRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
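Editorial aside (not part of the patch): the `status_code >= 400` branches above delegate to `core_exceptions.from_http_response`, which turns an HTTP error into a typed `GoogleAPICallError` subclass. A minimal sketch with a hand-built response, assuming google-api-core and requests are installed; URL and payload are placeholders:

```python
import requests
from google.api_core import exceptions as core_exceptions

# Build a fake 404 response the way requests would.
response = requests.Response()
response.status_code = 404
response._content = b'{"error": {"message": "resource not found"}}'
response.request = requests.Request(
    "GET", "https://alloydb.googleapis.com/v1alpha/example"
).prepare()

exc = core_exceptions.from_http_response(response)
print(type(exc).__name__)  # NotFound
```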
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AlloyDBAdminRestTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py new file mode 100644 index 000000000000..b5b70ff1bbed --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resources import ( + AutomatedBackupPolicy, + Backup, + BackupSource, + Cluster, + ConnectionInfo, + ContinuousBackupConfig, + ContinuousBackupInfo, + ContinuousBackupSource, + DatabaseVersion, + EncryptionConfig, + EncryptionInfo, + Instance, + InstanceView, + MigrationSource, + SslConfig, + SupportedDatabaseFlag, + UserPassword, +) +from .service import ( + BatchCreateInstancesMetadata, + BatchCreateInstancesRequest, + BatchCreateInstancesResponse, + BatchCreateInstanceStatus, + CreateBackupRequest, + CreateClusterRequest, + CreateInstanceRequest, + CreateInstanceRequests, + CreateSecondaryClusterRequest, + CreateSecondaryInstanceRequest, + DeleteBackupRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + FailoverInstanceRequest, + GenerateClientCertificateRequest, + GenerateClientCertificateResponse, + GetBackupRequest, + GetClusterRequest, + GetConnectionInfoRequest, + GetInstanceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListClustersRequest, + ListClustersResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSupportedDatabaseFlagsRequest, + ListSupportedDatabaseFlagsResponse, + OperationMetadata, + PromoteClusterRequest, + RestartInstanceRequest, + RestoreClusterRequest, + UpdateBackupRequest, + UpdateClusterRequest, + UpdateInstanceRequest, +) + +__all__ = ( + "AutomatedBackupPolicy", + "Backup", + "BackupSource", + "Cluster", + "ConnectionInfo", + "ContinuousBackupConfig", + "ContinuousBackupInfo", + "ContinuousBackupSource", + "EncryptionConfig", + "EncryptionInfo", + "Instance", + "MigrationSource", + "SslConfig", + "SupportedDatabaseFlag", + "UserPassword", + "DatabaseVersion", + "InstanceView", + "BatchCreateInstancesMetadata", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "BatchCreateInstanceStatus", + "CreateBackupRequest", + "CreateClusterRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "CreateSecondaryClusterRequest", + "CreateSecondaryInstanceRequest", + "DeleteBackupRequest", + "DeleteClusterRequest", + "DeleteInstanceRequest", + "FailoverInstanceRequest", + "GenerateClientCertificateRequest", + "GenerateClientCertificateResponse", + "GetBackupRequest", + 
"GetClusterRequest", + "GetConnectionInfoRequest", + "GetInstanceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "OperationMetadata", + "PromoteClusterRequest", + "RestartInstanceRequest", + "RestoreClusterRequest", + "UpdateBackupRequest", + "UpdateClusterRequest", + "UpdateInstanceRequest", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py new file mode 100644 index 000000000000..4303d2b69087 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -0,0 +1,1742 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1alpha", + manifest={ + "DatabaseVersion", + "InstanceView", + "UserPassword", + "MigrationSource", + "EncryptionConfig", + "EncryptionInfo", + "SslConfig", + "AutomatedBackupPolicy", + "ContinuousBackupConfig", + "ContinuousBackupInfo", + "BackupSource", + "ContinuousBackupSource", + "Cluster", + "Instance", + "ConnectionInfo", + "Backup", + "SupportedDatabaseFlag", + }, +) + + +class DatabaseVersion(proto.Enum): + r"""The supported database engine versions. + + Values: + DATABASE_VERSION_UNSPECIFIED (0): + This is an unknown database version. + POSTGRES_13 (1): + DEPRECATED - The database version is Postgres + 13. + POSTGRES_14 (2): + The database version is Postgres 14. + """ + DATABASE_VERSION_UNSPECIFIED = 0 + POSTGRES_13 = 1 + POSTGRES_14 = 2 + + +class InstanceView(proto.Enum): + r"""View on Instance. Pass this enum to rpcs that returns an + Instance message to control which subsets of fields to get. + + Values: + INSTANCE_VIEW_UNSPECIFIED (0): + INSTANCE_VIEW_UNSPECIFIED Not specified, equivalent to + BASIC. + INSTANCE_VIEW_BASIC (1): + BASIC server responses for a primary or read + instance include all the relevant instance + details, excluding the details of each node in + the instance. The default value. + INSTANCE_VIEW_FULL (2): + FULL response is equivalent to BASIC for + primary instance (for now). For read pool + instance, this includes details of each node in + the pool. + """ + INSTANCE_VIEW_UNSPECIFIED = 0 + INSTANCE_VIEW_BASIC = 1 + INSTANCE_VIEW_FULL = 2 + + +class UserPassword(proto.Message): + r"""The username/password for a database user. 
Used for + specifying initial users at cluster creation time. + + Attributes: + user (str): + The database username. + password (str): + The initial password for the user. + """ + + user: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MigrationSource(proto.Message): + r"""Subset of the source instance configuration that is available + when reading the cluster resource. + + Attributes: + host_port (str): + Output only. The host and port of the + on-premises instance in host:port format + reference_id (str): + Output only. Place holder for the external + source identifier(e.g DMS job name) that created + the cluster. + source_type (google.cloud.alloydb_v1alpha.types.MigrationSource.MigrationSourceType): + Output only. Type of migration source. + """ + + class MigrationSourceType(proto.Enum): + r"""Denote the type of migration source that created this + cluster. + + Values: + MIGRATION_SOURCE_TYPE_UNSPECIFIED (0): + Migration source is unknown. + DMS (1): + DMS source means the cluster was created via + DMS migration job. + """ + MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0 + DMS = 1 + + host_port: str = proto.Field( + proto.STRING, + number=1, + ) + reference_id: str = proto.Field( + proto.STRING, + number=2, + ) + source_type: MigrationSourceType = proto.Field( + proto.ENUM, + number=3, + enum=MigrationSourceType, + ) + + +class EncryptionConfig(proto.Message): + r"""EncryptionConfig describes the encryption config of a cluster + or a backup that is encrypted with a CMEK (customer-managed + encryption key). + + Attributes: + kms_key_name (str): + The fully-qualified resource name of the KMS key. Each Cloud + KMS key is regionalized and has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EncryptionInfo(proto.Message): + r"""EncryptionInfo describes the encryption information of a + cluster or a backup. + + Attributes: + encryption_type (google.cloud.alloydb_v1alpha.types.EncryptionInfo.Type): + Output only. Type of encryption. + kms_key_versions (MutableSequence[str]): + Output only. Cloud KMS key versions that are + being used to protect the database or the + backup. + """ + + class Type(proto.Enum): + r"""Possible encryption types. + + Values: + TYPE_UNSPECIFIED (0): + Encryption type not specified. Defaults to + GOOGLE_DEFAULT_ENCRYPTION. + GOOGLE_DEFAULT_ENCRYPTION (1): + The data is encrypted at rest with a key that + is fully managed by Google. No key version will + be populated. This is the default state. + CUSTOMER_MANAGED_ENCRYPTION (2): + The data is encrypted at rest with a key that + is managed by the customer. KMS key versions + will be populated. + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_DEFAULT_ENCRYPTION = 1 + CUSTOMER_MANAGED_ENCRYPTION = 2 + + encryption_type: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + kms_key_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class SslConfig(proto.Message): + r"""SSL configuration for an AlloyDB Cluster. + + Attributes: + ssl_mode (google.cloud.alloydb_v1alpha.types.SslConfig.SslMode): + Optional. SSL mode. Specifies client-server + SSL/TLS connection behavior. + ca_source (google.cloud.alloydb_v1alpha.types.SslConfig.CaSource): + Optional. Certificate Authority (CA) source. Only + CA_SOURCE_MANAGED is supported currently, and is the default + value. 
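Editorial aside (not part of the patch): constructing the CMEK config in the key format documented on `EncryptionConfig` above; every identifier below is a placeholder, and the example assumes this package is installed:

```python
from google.cloud.alloydb_v1alpha.types import EncryptionConfig

cmek = EncryptionConfig(
    kms_key_name=(
        "projects/my-project/locations/us-central1/"
        "keyRings/my-ring/cryptoKeys/my-key"
    )
)
print(cmek.kms_key_name)
```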
+ """ + + class SslMode(proto.Enum): + r"""SSL mode options. + + Values: + SSL_MODE_UNSPECIFIED (0): + SSL mode not specified. Defaults to SSL_MODE_ALLOW. + SSL_MODE_ALLOW (1): + SSL connections are optional. CA verification + not enforced. + SSL_MODE_REQUIRE (2): + SSL connections are required. CA verification + not enforced. Clients may use locally + self-signed certificates (default psql client + behavior). + SSL_MODE_VERIFY_CA (3): + SSL connections are required. CA verification + enforced. Clients must have certificates signed + by a Cluster CA, e.g. via + GenerateClientCertificate. + """ + SSL_MODE_UNSPECIFIED = 0 + SSL_MODE_ALLOW = 1 + SSL_MODE_REQUIRE = 2 + SSL_MODE_VERIFY_CA = 3 + + class CaSource(proto.Enum): + r"""Certificate Authority (CA) source for SSL/TLS certificates. + + Values: + CA_SOURCE_UNSPECIFIED (0): + Certificate Authority (CA) source not specified. Defaults to + CA_SOURCE_MANAGED. + CA_SOURCE_MANAGED (1): + Certificate Authority (CA) managed by the + AlloyDB Cluster. + """ + CA_SOURCE_UNSPECIFIED = 0 + CA_SOURCE_MANAGED = 1 + + ssl_mode: SslMode = proto.Field( + proto.ENUM, + number=1, + enum=SslMode, + ) + ca_source: CaSource = proto.Field( + proto.ENUM, + number=2, + enum=CaSource, + ) + + +class AutomatedBackupPolicy(proto.Message): + r"""Message describing the user-specified automated backup + policy. + All fields in the automated backup policy are optional. Defaults + for each field are provided if they are not set. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + weekly_schedule (google.cloud.alloydb_v1alpha.types.AutomatedBackupPolicy.WeeklySchedule): + Weekly schedule for the Backup. + + This field is a member of `oneof`_ ``schedule``. + time_based_retention (google.cloud.alloydb_v1alpha.types.AutomatedBackupPolicy.TimeBasedRetention): + Time-based Backup retention policy. + + This field is a member of `oneof`_ ``retention``. + quantity_based_retention (google.cloud.alloydb_v1alpha.types.AutomatedBackupPolicy.QuantityBasedRetention): + Quantity-based Backup retention policy to + retain recent backups. + + This field is a member of `oneof`_ ``retention``. + enabled (bool): + Whether automated automated backups are + enabled. If not set, defaults to true. + + This field is a member of `oneof`_ ``_enabled``. + backup_window (google.protobuf.duration_pb2.Duration): + The length of the time window during which a + backup can be taken. If a backup does not + succeed within this time window, it will be + canceled and considered failed. + + The backup window must be at least 5 minutes + long. There is no upper bound on the window. If + not set, it defaults to 1 hour. + encryption_config (google.cloud.alloydb_v1alpha.types.EncryptionConfig): + Optional. The encryption config can be + specified to encrypt the backups with a + customer-managed encryption key (CMEK). When + this field is not specified, the backup will + then use default encryption scheme to protect + the user data. + location (str): + The location where the backup will be stored. + Currently, the only supported option is to store + the backup in the same region as the cluster. + If empty, defaults to the region of the cluster. 
+ labels (MutableMapping[str, str]): + Labels to apply to backups created using this + configuration. + """ + + class WeeklySchedule(proto.Message): + r"""A weekly schedule starts a backup at prescribed start times within a + day, for the specified days of the week. + + The weekly schedule message is flexible and can be used to create + many types of schedules. For example, to have a daily backup that + starts at 22:00, configure the ``start_times`` field to have one + element "22:00" and the ``days_of_week`` field to have all seven + days of the week. + + Attributes: + start_times (MutableSequence[google.type.timeofday_pb2.TimeOfDay]): + The times during the day to start a backup. + The start times are assumed to be in UTC and to + be an exact hour (e.g., 04:00:00). + If no start times are provided, a single fixed + start time is chosen arbitrarily. + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + The days of the week to perform a backup. + If this field is left empty, the default of + every day of the week is used. + """ + + start_times: MutableSequence[timeofday_pb2.TimeOfDay] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=timeofday_pb2.TimeOfDay, + ) + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + + class TimeBasedRetention(proto.Message): + r"""A time based retention policy specifies that all backups + within a certain time period should be retained. + + Attributes: + retention_period (google.protobuf.duration_pb2.Duration): + The retention period. + """ + + retention_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + class QuantityBasedRetention(proto.Message): + r"""A quantity based policy specifies that a certain number of + the most recent successful backups should be retained. + + Attributes: + count (int): + The number of backups to retain. + """ + + count: int = proto.Field( + proto.INT32, + number=1, + ) + + weekly_schedule: WeeklySchedule = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message=WeeklySchedule, + ) + time_based_retention: TimeBasedRetention = proto.Field( + proto.MESSAGE, + number=4, + oneof="retention", + message=TimeBasedRetention, + ) + quantity_based_retention: QuantityBasedRetention = proto.Field( + proto.MESSAGE, + number=5, + oneof="retention", + message=QuantityBasedRetention, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + backup_window: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="EncryptionConfig", + ) + location: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + + +class ContinuousBackupConfig(proto.Message): + r"""ContinuousBackupConfig describes the continuous backups + recovery configurations of a cluster. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Whether ContinuousBackup is enabled. + + This field is a member of `oneof`_ ``_enabled``. + recovery_window_days (int): + The number of days backups and logs will be + retained, which determines the window of time + that data is recoverable for. If not set, it + defaults to 14 days. 
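Editorial aside (not part of the patch): assembling the policy described above with proto-plus types, matching the WeeklySchedule docstring's own daily-at-22:00 example. Values are illustrative, and the sketch assumes this package and googleapis-common-protos are installed:

```python
from google.cloud.alloydb_v1alpha.types import AutomatedBackupPolicy
from google.type import dayofweek_pb2, timeofday_pb2

# A daily 22:00 UTC backup, keeping the 7 most recent backups.
policy = AutomatedBackupPolicy(
    enabled=True,
    weekly_schedule=AutomatedBackupPolicy.WeeklySchedule(
        start_times=[timeofday_pb2.TimeOfDay(hours=22)],
        days_of_week=[
            dayofweek_pb2.MONDAY,
            dayofweek_pb2.TUESDAY,
            dayofweek_pb2.WEDNESDAY,
            dayofweek_pb2.THURSDAY,
            dayofweek_pb2.FRIDAY,
            dayofweek_pb2.SATURDAY,
            dayofweek_pb2.SUNDAY,
        ],
    ),
    quantity_based_retention=AutomatedBackupPolicy.QuantityBasedRetention(count=7),
)
# "schedule" and "retention" are oneofs: setting weekly_schedule and
# quantity_based_retention selects one member of each.
```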
+ encryption_config (google.cloud.alloydb_v1alpha.types.EncryptionConfig): + The encryption config can be specified to + encrypt the backups with a customer-managed + encryption key (CMEK). When this field is not + specified, the backup will then use default + encryption scheme to protect the user data. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + recovery_window_days: int = proto.Field( + proto.INT32, + number=4, + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="EncryptionConfig", + ) + + +class ContinuousBackupInfo(proto.Message): + r"""ContinuousBackupInfo describes the continuous backup + properties of a cluster. + + Attributes: + encryption_info (google.cloud.alloydb_v1alpha.types.EncryptionInfo): + Output only. The encryption information for + the WALs and backups required for + ContinuousBackup. + enabled_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When ContinuousBackup was most + recently enabled. Set to null if + ContinuousBackup is not enabled. + schedule (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Output only. Days of the week on which a + continuous backup is taken. Output only field. + Ignored if passed into the request. + """ + + encryption_info: "EncryptionInfo" = proto.Field( + proto.MESSAGE, + number=1, + message="EncryptionInfo", + ) + enabled_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + schedule: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class BackupSource(proto.Message): + r"""Message describing a BackupSource. + + Attributes: + backup_uid (str): + Output only. The system-generated UID of the + backup which was used to create this resource. + The UID is generated when the backup is created, + and it is retained until the backup is deleted. + backup_name (str): + Required. The name of the backup resource with the format: + + - projects/{project}/locations/{region}/backups/{backup_id} + """ + + backup_uid: str = proto.Field( + proto.STRING, + number=2, + ) + backup_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ContinuousBackupSource(proto.Message): + r"""Message describing a ContinuousBackupSource. + + Attributes: + cluster (str): + Required. The source cluster from which to + restore. This cluster must have continuous + backup enabled for this operation to succeed. + For the required format, see the comment on the + Cluster.name field. + point_in_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The point in time to restore to. + """ + + cluster: str = proto.Field( + proto.STRING, + number=1, + ) + point_in_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class Cluster(proto.Message): + r"""A cluster is a collection of regional AlloyDB resources. It + can include a primary instance and one or more read pool + instances. All cluster resources share a storage layer, which + scales as needed. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        backup_source (google.cloud.alloydb_v1alpha.types.BackupSource):
+            Output only. Cluster created from backup.
+
+            This field is a member of `oneof`_ ``source``.
+        migration_source (google.cloud.alloydb_v1alpha.types.MigrationSource):
+            Output only. Cluster created via DMS
+            migration.
+
+            This field is a member of `oneof`_ ``source``.
+        name (str):
+            Output only. The name of the cluster resource with the
+            format:
+
+            - projects/{project}/locations/{region}/clusters/{cluster_id}
+              where the cluster ID segment should satisfy the regex
+              expression ``[a-z0-9-]+``. For more details see
+              https://google.aip.dev/122. The prefix of the cluster
+              resource name is the name of the parent resource:
+            - projects/{project}/locations/{region}
+        display_name (str):
+            User-settable and human-readable display name
+            for the Cluster.
+        uid (str):
+            Output only. The system-generated UID of the
+            resource. The UID is assigned when the resource
+            is created, and it is retained until it is
+            deleted.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Create time stamp
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Update time stamp
+        delete_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Delete time stamp
+        labels (MutableMapping[str, str]):
+            Labels as key value pairs
+        state (google.cloud.alloydb_v1alpha.types.Cluster.State):
+            Output only. The current serving state of the
+            cluster.
+        cluster_type (google.cloud.alloydb_v1alpha.types.Cluster.ClusterType):
+            Output only. The type of the cluster. This is an output-only
+            field and it's populated at the Cluster creation time or the
+            Cluster promotion time. The cluster type is determined by
+            which RPC was used to create the cluster (i.e.
+            ``CreateCluster`` vs. ``CreateSecondaryCluster``).
+        database_version (google.cloud.alloydb_v1alpha.types.DatabaseVersion):
+            Output only. The database engine major
+            version. This is an output-only field and it's
+            populated at the Cluster creation time. This
+            field cannot be changed after cluster creation.
+        network (str):
+            Required. The resource link for the VPC network in which
+            cluster resources are created and from which they are
+            accessible via Private IP. The network must belong to the
+            same project as the cluster. It is specified in the form:
+            "projects/{project_number}/global/networks/{network_id}".
+            This is required to create a cluster. It can be updated, but
+            it cannot be removed.
+        etag (str):
+            For Resource freshness validation
+            (https://google.aip.dev/154)
+        annotations (MutableMapping[str, str]):
+            Annotations to allow client tools to store
+            small amounts of arbitrary data. This is
+            distinct from labels. https://google.aip.dev/128
+        reconciling (bool):
+            Output only. Reconciling
+            (https://google.aip.dev/128#reconciliation). Set
+            to true if the current state of Cluster does not
+            match the user's intended state, and the service
+            is actively updating the resource to reconcile
+            them. This can happen due to user-triggered
+            updates or system actions like failover or
+            maintenance.
+        initial_user (google.cloud.alloydb_v1alpha.types.UserPassword):
+            Input only. Initial user to set up during cluster creation.
+            Required. If used in ``RestoreCluster`` this is ignored.
+        automated_backup_policy (google.cloud.alloydb_v1alpha.types.AutomatedBackupPolicy):
+            The automated backup policy for this cluster.
+            If no policy is provided then the default policy
+            will be used. If backups are supported for the
+            cluster, the default policy takes one backup a
+            day, has a backup window of 1 hour, and retains
+            backups for 14 days. For more information on the
+            defaults, consult the documentation for the
+            message type.
+        ssl_config (google.cloud.alloydb_v1alpha.types.SslConfig):
+            SSL configuration for this AlloyDB Cluster.
+        encryption_config (google.cloud.alloydb_v1alpha.types.EncryptionConfig):
+            Optional. The encryption config can be
+            specified to encrypt the data disks and other
+            persistent data resources of a cluster with a
+            customer-managed encryption key (CMEK). When
+            this field is not specified, the cluster will
+            then use default encryption scheme to protect
+            the user data.
+        encryption_info (google.cloud.alloydb_v1alpha.types.EncryptionInfo):
+            Output only. The encryption information for
+            the cluster.
+        continuous_backup_config (google.cloud.alloydb_v1alpha.types.ContinuousBackupConfig):
+            Optional. Continuous backup configuration for
+            this cluster.
+        continuous_backup_info (google.cloud.alloydb_v1alpha.types.ContinuousBackupInfo):
+            Output only. Continuous backup properties for
+            this cluster.
+        secondary_config (google.cloud.alloydb_v1alpha.types.Cluster.SecondaryConfig):
+            Cross Region replication config specific to
+            SECONDARY cluster.
+        primary_config (google.cloud.alloydb_v1alpha.types.Cluster.PrimaryConfig):
+            Output only. Cross Region replication config
+            specific to PRIMARY cluster.
+    """
+
+    class State(proto.Enum):
+        r"""Cluster State
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                The state of the cluster is unknown.
+            READY (1):
+                The cluster is active and running.
+            STOPPED (2):
+                The cluster is stopped. All instances in the
+                cluster are stopped. Customers can start a
+                stopped cluster at any point and all their
+                instances will come back to life with the same
+                names and IP resources. In this state, the
+                customer pays for storage.
+                Associated backups could also be present in a
+                stopped cluster.
+            EMPTY (3):
+                The cluster is empty and has no associated
+                resources. All instances, associated storage and
+                backups have been deleted.
+            CREATING (4):
+                The cluster is being created.
+            DELETING (5):
+                The cluster is being deleted.
+            FAILED (6):
+                The creation of the cluster failed.
+            BOOTSTRAPPING (7):
+                The cluster is bootstrapping with data from
+                some other source. Direct mutations to the
+                cluster (e.g., adding a read pool) are not
+                allowed.
+            MAINTENANCE (8):
+                The cluster is under maintenance. AlloyDB
+                regularly performs maintenance and upgrades on
+                customer clusters. Updates on the cluster are
+                not allowed while the cluster is in this state.
+            PROMOTING (9):
+                The cluster is being promoted.
+        """
+        STATE_UNSPECIFIED = 0
+        READY = 1
+        STOPPED = 2
+        EMPTY = 3
+        CREATING = 4
+        DELETING = 5
+        FAILED = 6
+        BOOTSTRAPPING = 7
+        MAINTENANCE = 8
+        PROMOTING = 9
+
+    class ClusterType(proto.Enum):
+        r"""Type of Cluster
+
+        Values:
+            CLUSTER_TYPE_UNSPECIFIED (0):
+                The type of the cluster is unknown.
+            PRIMARY (1):
+                Primary cluster that supports read and write
+                operations.
+            SECONDARY (2):
+                Secondary cluster that is replicating from
+                another region. This only supports reads.
+        """
+        CLUSTER_TYPE_UNSPECIFIED = 0
+        PRIMARY = 1
+        SECONDARY = 2
+
+    class SecondaryConfig(proto.Message):
+        r"""Configuration information for the secondary cluster. This
+        should be set if and only if the cluster is of type SECONDARY.
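Editorial aside (not part of the patch): the nested `State` and `ClusterType` enums above behave as IntEnums on the proto-plus message. A small illustrative check (names are placeholders; assumes this package is installed):

```python
from google.cloud.alloydb_v1alpha.types import Cluster

cluster = Cluster(
    name="projects/my-project/locations/us-central1/clusters/my-cluster",
    cluster_type=Cluster.ClusterType.PRIMARY,
)
# Unset enum fields default to the zero value.
if cluster.state == Cluster.State.STATE_UNSPECIFIED:
    print("state has not been reported yet")
print(Cluster.ClusterType(cluster.cluster_type).name)  # PRIMARY
```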
+ + Attributes: + primary_cluster_name (str): + The name of the primary cluster name with the format: + + - projects/{project}/locations/{region}/clusters/{cluster_id} + """ + + primary_cluster_name: str = proto.Field( + proto.STRING, + number=1, + ) + + class PrimaryConfig(proto.Message): + r"""Configuration for the primary cluster. It has the list of + clusters that are replicating from this cluster. This should be + set if and only if the cluster is of type PRIMARY. + + Attributes: + secondary_cluster_names (MutableSequence[str]): + Output only. Names of the clusters that are + replicating from this cluster. + """ + + secondary_cluster_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + backup_source: "BackupSource" = proto.Field( + proto.MESSAGE, + number=15, + oneof="source", + message="BackupSource", + ) + migration_source: "MigrationSource" = proto.Field( + proto.MESSAGE, + number=16, + oneof="source", + message="MigrationSource", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + cluster_type: ClusterType = proto.Field( + proto.ENUM, + number=24, + enum=ClusterType, + ) + database_version: "DatabaseVersion" = proto.Field( + proto.ENUM, + number=9, + enum="DatabaseVersion", + ) + network: str = proto.Field( + proto.STRING, + number=10, + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=12, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=13, + ) + initial_user: "UserPassword" = proto.Field( + proto.MESSAGE, + number=14, + message="UserPassword", + ) + automated_backup_policy: "AutomatedBackupPolicy" = proto.Field( + proto.MESSAGE, + number=17, + message="AutomatedBackupPolicy", + ) + ssl_config: "SslConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="SslConfig", + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=19, + message="EncryptionConfig", + ) + encryption_info: "EncryptionInfo" = proto.Field( + proto.MESSAGE, + number=20, + message="EncryptionInfo", + ) + continuous_backup_config: "ContinuousBackupConfig" = proto.Field( + proto.MESSAGE, + number=27, + message="ContinuousBackupConfig", + ) + continuous_backup_info: "ContinuousBackupInfo" = proto.Field( + proto.MESSAGE, + number=28, + message="ContinuousBackupInfo", + ) + secondary_config: SecondaryConfig = proto.Field( + proto.MESSAGE, + number=22, + message=SecondaryConfig, + ) + primary_config: PrimaryConfig = proto.Field( + proto.MESSAGE, + number=23, + message=PrimaryConfig, + ) + + +class Instance(proto.Message): + r"""An Instance is a computing unit that an end customer can + connect to. It's the main unit of computing resources in + AlloyDB. + + Attributes: + name (str): + Output only. 
The name of the instance resource with the + format: + + - projects/{project}/locations/{region}/clusters/{cluster_id}/instances/{instance_id} + where the cluster and instance ID segments should satisfy + the regex expression ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``, + e.g. 1-63 characters of lowercase letters, numbers, and + dashes, starting with a letter, and ending with a letter + or number. For more details see + https://google.aip.dev/122. The prefix of the instance + resource name is the name of the parent resource: + - projects/{project}/locations/{region}/clusters/{cluster_id} + display_name (str): + User-settable and human-readable display name + for the Instance. + uid (str): + Output only. The system-generated UID of the + resource. The UID is assigned when the resource + is created, and it is retained until it is + deleted. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Delete time stamp + labels (MutableMapping[str, str]): + Labels as key value pairs + state (google.cloud.alloydb_v1alpha.types.Instance.State): + Output only. The current serving state of the + instance. + instance_type (google.cloud.alloydb_v1alpha.types.Instance.InstanceType): + Required. The type of the instance. Specified + at creation time. + machine_config (google.cloud.alloydb_v1alpha.types.Instance.MachineConfig): + Configurations for the machines that host the + underlying database engine. + availability_type (google.cloud.alloydb_v1alpha.types.Instance.AvailabilityType): + Availability type of an Instance. + Defaults to REGIONAL for both primary and read + instances. Note that primary and read instances + can have different availability types. + gce_zone (str): + The Compute Engine zone that the instance + should serve from, per + https://cloud.google.com/compute/docs/regions-zones + This can ONLY be specified for ZONAL instances. + If present for a REGIONAL instance, an error + will be thrown. If this is absent for a ZONAL + instance, the instance is created in a random zone + with available capacity. + database_flags (MutableMapping[str, str]): + Database flags. Set at instance level. + + - They are copied from the primary instance on read instance + creation. + - Read instances can set new or override existing flags + that are relevant for reads, e.g. for enabling columnar + cache on a read instance. Flags set on a read instance may + or may not be present on the primary. + + This is a list of "key": "value" pairs. "key": The name of + the flag. These flags are passed at instance setup time, so + include both server options and system variables for + Postgres. Flags are specified with underscores, not hyphens. + "value": The value of the flag. Booleans are set to **on** + for true and **off** for false. This field must be omitted + if the flag doesn't take a value. + writable_node (google.cloud.alloydb_v1alpha.types.Instance.Node): + Output only. This is set for the read-write + VM of the PRIMARY instance only. + nodes (MutableSequence[google.cloud.alloydb_v1alpha.types.Instance.Node]): + Output only. List of available read-only VMs + in this instance, including the standby for a + PRIMARY instance. + query_insights_config (google.cloud.alloydb_v1alpha.types.Instance.QueryInsightsInstanceConfig): + Configuration for query insights.
+ read_pool_config (google.cloud.alloydb_v1alpha.types.Instance.ReadPoolConfig): + Read pool specific config. + ip_address (str): + Output only. The IP address for the Instance. + This is the connection endpoint for an end-user + application. + reconciling (bool): + Output only. Reconciling + (https://google.aip.dev/128#reconciliation). Set + to true if the current state of Instance does + not match the user's intended state, and the + service is actively updating the resource to + reconcile them. This can happen due to + user-triggered updates or system actions like + failover or maintenance. + etag (str): + For Resource freshness validation + (https://google.aip.dev/154) + annotations (MutableMapping[str, str]): + Annotations to allow client tools to store a + small amount of arbitrary data. This is distinct + from labels. https://google.aip.dev/128 + """ + + class State(proto.Enum): + r"""Instance State + + Values: + STATE_UNSPECIFIED (0): + The state of the instance is unknown. + READY (1): + The instance is active and running. + STOPPED (2): + The instance is stopped. Instance name and IP + resources are preserved. + CREATING (3): + The instance is being created. + DELETING (4): + The instance is being deleted. + MAINTENANCE (5): + The instance is down for maintenance. + FAILED (6): + The creation of the instance failed or a + fatal error occurred during an operation on the + instance. Note: Instances in this state will be + auto-repaired, and customers should be able to + restart, update or delete these instances. + BOOTSTRAPPING (8): + Index 7 is used in the producer apis for ROLLED_BACK state. + Keeping that index unused in case that state also needs to + be exposed via consumer apis in the future. The instance has been + configured to sync data from some other source. + PROMOTING (9): + The instance is being promoted. + """ + STATE_UNSPECIFIED = 0 + READY = 1 + STOPPED = 2 + CREATING = 3 + DELETING = 4 + MAINTENANCE = 5 + FAILED = 6 + BOOTSTRAPPING = 8 + PROMOTING = 9 + + class InstanceType(proto.Enum): + r"""Type of an Instance + + Values: + INSTANCE_TYPE_UNSPECIFIED (0): + The type of the instance is unknown. + PRIMARY (1): + PRIMARY instances support read and write + operations. + READ_POOL (2): + READ POOL instances support read operations only. Each read + pool instance consists of one or more homogeneous nodes. + + - A read pool of size 1 can only have zonal availability. + - Read pools with node count of 2 or more can have regional + availability (nodes are present in 2 or more zones in a + region). + SECONDARY (3): + SECONDARY instances support read operations + only. A SECONDARY instance is a cross-region read + replica. + """ + INSTANCE_TYPE_UNSPECIFIED = 0 + PRIMARY = 1 + READ_POOL = 2 + SECONDARY = 3 + + class AvailabilityType(proto.Enum): + r"""The Availability type of an instance. Potential values: + - ZONAL: The instance serves data from only one zone. Outages in + that zone affect instance availability. + - REGIONAL: The instance can serve data from more than one zone + in a region (it is highly available). + + Values: + AVAILABILITY_TYPE_UNSPECIFIED (0): + This is an unknown Availability type. + ZONAL (1): + Zonal available instance. + REGIONAL (2): + Regional (highly available) instance. + """ + AVAILABILITY_TYPE_UNSPECIFIED = 0 + ZONAL = 1 + REGIONAL = 2 + + class MachineConfig(proto.Message): + r"""MachineConfig describes the configuration of a machine. + + Attributes: + cpu_count (int): + The number of CPUs in the VM instance.
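As an illustration of the ``Instance`` fields above, a minimal sketch of building an ``Instance`` message in Python. It assumes the generated ``google.cloud.alloydb_v1alpha`` package is installed; the display name and flag values are hypothetical placeholders::

    from google.cloud import alloydb_v1alpha

    instance = alloydb_v1alpha.Instance(
        display_name="primary-1",
        instance_type=alloydb_v1alpha.Instance.InstanceType.PRIMARY,
        machine_config=alloydb_v1alpha.Instance.MachineConfig(cpu_count=4),
        # Flag names use underscores, and boolean flags take "on"/"off" values.
        database_flags={"max_connections": "1000"},
    )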
+ """ + + cpu_count: int = proto.Field( + proto.INT32, + number=1, + ) + + class Node(proto.Message): + r"""Details of a single node in the instance. + Nodes in an AlloyDB instance are ephemereal, they can change + during update, failover, autohealing and resize operations. + + Attributes: + zone_id (str): + The Compute Engine zone of the VM e.g. + "us-central1-b". + id (str): + The identifier of the VM e.g. + "test-read-0601-407e52be-ms3l". + ip (str): + The private IP address of the VM e.g. + "10.57.0.34". + state (str): + Determined by state of the compute VM and + postgres-service health. Compute VM state can + have values listed in + https://cloud.google.com/compute/docs/instances/instance-life-cycle + and postgres-service health can have values: + HEALTHY and UNHEALTHY. + """ + + zone_id: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + ip: str = proto.Field( + proto.STRING, + number=3, + ) + state: str = proto.Field( + proto.STRING, + number=4, + ) + + class QueryInsightsInstanceConfig(proto.Message): + r"""QueryInsights Instance specific configuration. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + record_application_tags (bool): + Record application tags for an instance. + This flag is turned "on" by default. + + This field is a member of `oneof`_ ``_record_application_tags``. + record_client_address (bool): + Record client address for an instance. Client + address is PII information. This flag is turned + "on" by default. + + This field is a member of `oneof`_ ``_record_client_address``. + query_string_length (int): + Query string length. The default value is + 1024. Any integer between 256 and 4500 is + considered valid. + query_plans_per_minute (int): + Number of query execution plans captured by + Insights per minute for all queries combined. + The default value is 5. Any integer between 0 + and 20 is considered valid. + + This field is a member of `oneof`_ ``_query_plans_per_minute``. + """ + + record_application_tags: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + record_client_address: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + query_string_length: int = proto.Field( + proto.UINT32, + number=4, + ) + query_plans_per_minute: int = proto.Field( + proto.UINT32, + number=5, + optional=True, + ) + + class ReadPoolConfig(proto.Message): + r"""Configuration for a read pool instance. + + Attributes: + node_count (int): + Read capacity, i.e. number of nodes in a read + pool instance. 
+ """ + + node_count: int = proto.Field( + proto.INT32, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + instance_type: InstanceType = proto.Field( + proto.ENUM, + number=9, + enum=InstanceType, + ) + machine_config: MachineConfig = proto.Field( + proto.MESSAGE, + number=10, + message=MachineConfig, + ) + availability_type: AvailabilityType = proto.Field( + proto.ENUM, + number=11, + enum=AvailabilityType, + ) + gce_zone: str = proto.Field( + proto.STRING, + number=12, + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + writable_node: Node = proto.Field( + proto.MESSAGE, + number=19, + message=Node, + ) + nodes: MutableSequence[Node] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=Node, + ) + query_insights_config: QueryInsightsInstanceConfig = proto.Field( + proto.MESSAGE, + number=21, + message=QueryInsightsInstanceConfig, + ) + read_pool_config: ReadPoolConfig = proto.Field( + proto.MESSAGE, + number=14, + message=ReadPoolConfig, + ) + ip_address: str = proto.Field( + proto.STRING, + number=15, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=16, + ) + etag: str = proto.Field( + proto.STRING, + number=17, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + + +class ConnectionInfo(proto.Message): + r"""ConnectionInfo singleton resource. + https://google.aip.dev/156 + + Attributes: + name (str): + The name of the ConnectionInfo singleton resource, e.g.: + projects/{project}/locations/{location}/clusters/\ */instances/*/connectionInfo + This field currently has no semantic meaning. + ip_address (str): + Output only. The IP address for the Instance. + This is the connection endpoint for an end-user + application. + pem_certificate_chain (MutableSequence[str]): + Output only. The pem-encoded chain that may + be used to verify the X.509 certificate. + Expected to be in issuer-to-root order according + to RFC 5246. + instance_uid (str): + Output only. The unique ID of the Instance. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ip_address: str = proto.Field( + proto.STRING, + number=2, + ) + pem_certificate_chain: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + instance_uid: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Backup(proto.Message): + r"""Message describing Backup object + + Attributes: + name (str): + Output only. The name of the backup resource with the + format: + + - projects/{project}/locations/{region}/backups/{backup_id} + where the cluster and backup ID segments should satisfy + the regex expression ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``, + e.g. 1-63 characters of lowercase letters, numbers, and + dashes, starting with a letter, and ending with a letter + or number. 
For more details see + https://google.aip.dev/122. The prefix of the backup + resource name is the name of the parent resource: + - projects/{project}/locations/{region} + display_name (str): + User-settable and human-readable display name + for the Backup. + uid (str): + Output only. The system-generated UID of the + resource. The UID is assigned when the resource + is created, and it is retained until it is + deleted. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Delete time stamp + labels (MutableMapping[str, str]): + Labels as key value pairs + state (google.cloud.alloydb_v1alpha.types.Backup.State): + Output only. The current state of the backup. + type_ (google.cloud.alloydb_v1alpha.types.Backup.Type): + The backup type, which suggests the trigger + for the backup. + description (str): + User-provided description of the backup. + cluster_uid (str): + Output only. The system-generated UID of the + cluster which was used to create this resource. + cluster_name (str): + Required. The full resource name of the backup source + cluster (e.g., projects//locations//clusters/). + reconciling (bool): + Output only. Reconciling + (https://google.aip.dev/128#reconciliation), if + true, indicates that the service is actively + updating the resource. This can happen due to + user-triggered updates or system actions like + failover or maintenance. + encryption_config (google.cloud.alloydb_v1alpha.types.EncryptionConfig): + Optional. The encryption config can be + specified to encrypt the backup with a + customer-managed encryption key (CMEK). When + this field is not specified, the backup will + then use the default encryption scheme to protect + the user data. + encryption_info (google.cloud.alloydb_v1alpha.types.EncryptionInfo): + Output only. The encryption information for + the backup. + etag (str): + For Resource freshness validation + (https://google.aip.dev/154) + annotations (MutableMapping[str, str]): + Annotations to allow client tools to store a + small amount of arbitrary data. This is distinct + from labels. https://google.aip.dev/128 + size_bytes (int): + Output only. The size of the backup in bytes. + expiry_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time after which the backup is eligible + to be garbage collected. It is the duration specified by the + backup's retention policy, added to the backup's + create_time. + """ + + class State(proto.Enum): + r"""Backup State + + Values: + STATE_UNSPECIFIED (0): + The state of the backup is unknown. + READY (1): + The backup is ready. + CREATING (2): + The backup is being created. + FAILED (3): + The backup failed. + DELETING (4): + The backup is being deleted. + """ + STATE_UNSPECIFIED = 0 + READY = 1 + CREATING = 2 + FAILED = 3 + DELETING = 4 + + class Type(proto.Enum): + r"""Backup Type + + Values: + TYPE_UNSPECIFIED (0): + Backup Type is unknown. + ON_DEMAND (1): + ON_DEMAND backups that were triggered by the customer (e.g., + not AUTOMATED). + AUTOMATED (2): + AUTOMATED backups triggered by the automated + backups scheduler pursuant to an automated + backup policy. + CONTINUOUS (3): + CONTINUOUS backups triggered by the automated + backups scheduler due to a continuous backup + policy.
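For example, the ``type_`` field above lets a caller separate customer-triggered backups from scheduled ones. A hypothetical helper, assuming the same generated package::

    from google.cloud import alloydb_v1alpha

    def on_demand_backups(backups):
        """Keep only backups the customer triggered explicitly."""
        return [
            b for b in backups
            if b.type_ == alloydb_v1alpha.Backup.Type.ON_DEMAND
        ]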
+ """ + TYPE_UNSPECIFIED = 0 + ON_DEMAND = 1 + AUTOMATED = 2 + CONTINUOUS = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=15, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + type_: Type = proto.Field( + proto.ENUM, + number=8, + enum=Type, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + cluster_uid: str = proto.Field( + proto.STRING, + number=18, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=10, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=11, + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=12, + message="EncryptionConfig", + ) + encryption_info: "EncryptionInfo" = proto.Field( + proto.MESSAGE, + number=13, + message="EncryptionInfo", + ) + etag: str = proto.Field( + proto.STRING, + number=14, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=17, + ) + expiry_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=19, + message=timestamp_pb2.Timestamp, + ) + + +class SupportedDatabaseFlag(proto.Message): + r"""SupportedDatabaseFlag gives general information about a database + flag, like type and allowed values. This is a static value that is + defined on the server side, and it cannot be modified by callers. To + set the Database flags on a particular Instance, a caller should + modify the Instance.database_flags field. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_restrictions (google.cloud.alloydb_v1alpha.types.SupportedDatabaseFlag.StringRestrictions): + Restriction on STRING type value. + + This field is a member of `oneof`_ ``restrictions``. + integer_restrictions (google.cloud.alloydb_v1alpha.types.SupportedDatabaseFlag.IntegerRestrictions): + Restriction on INTEGER type value. + + This field is a member of `oneof`_ ``restrictions``. + name (str): + The name of the flag resource, following Google Cloud + conventions, e.g.: + + - projects/{project}/locations/{location}/flags/{flag} This + field currently has no semantic meaning. + flag_name (str): + The name of the database flag, e.g. "max_allowed_packets". + The is a possibly key for the Instance.database_flags map + field. + value_type (google.cloud.alloydb_v1alpha.types.SupportedDatabaseFlag.ValueType): + + accepts_multiple_values (bool): + Whether the database flag accepts multiple + values. If true, a comma-separated list of + stringified values may be specified. 
+ supported_db_versions (MutableSequence[google.cloud.alloydb_v1alpha.types.DatabaseVersion]): + Major database engine versions for which this + flag is supported. + requires_db_restart (bool): + Whether setting or updating this flag on an + Instance requires a database restart. If a flag + that requires database restart is set, the + backend will automatically restart the database + (making sure to satisfy any availability SLOs). + """ + + class ValueType(proto.Enum): + r"""ValueType describes the semantic type of the value that the flag + accepts. Regardless of the ValueType, the Instance.database_flags + field accepts the stringified version of the value, i.e. "20" or + "3.14". + + Values: + VALUE_TYPE_UNSPECIFIED (0): + This is an unknown flag type. + STRING (1): + String type flag. + INTEGER (2): + Integer type flag. + FLOAT (3): + Float type flag. + NONE (4): + Denotes that the flag does not accept any + values. + """ + VALUE_TYPE_UNSPECIFIED = 0 + STRING = 1 + INTEGER = 2 + FLOAT = 3 + NONE = 4 + + class StringRestrictions(proto.Message): + r"""Restrictions on STRING type values + + Attributes: + allowed_values (MutableSequence[str]): + The list of allowed values, if bounded. This + field will be empty if there is an unbounded + number of allowed values. + """ + + allowed_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class IntegerRestrictions(proto.Message): + r"""Restrictions on INTEGER type values. + + Attributes: + min_value (google.protobuf.wrappers_pb2.Int64Value): + The minimum value that can be specified, if + applicable. + max_value (google.protobuf.wrappers_pb2.Int64Value): + The maximum value that can be specified, if + applicable. + """ + + min_value: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.Int64Value, + ) + max_value: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=2, + message=wrappers_pb2.Int64Value, + ) + + string_restrictions: StringRestrictions = proto.Field( + proto.MESSAGE, + number=7, + oneof="restrictions", + message=StringRestrictions, + ) + integer_restrictions: IntegerRestrictions = proto.Field( + proto.MESSAGE, + number=8, + oneof="restrictions", + message=IntegerRestrictions, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + flag_name: str = proto.Field( + proto.STRING, + number=2, + ) + value_type: ValueType = proto.Field( + proto.ENUM, + number=3, + enum=ValueType, + ) + accepts_multiple_values: bool = proto.Field( + proto.BOOL, + number=4, + ) + supported_db_versions: MutableSequence["DatabaseVersion"] = proto.RepeatedField( + proto.ENUM, + number=5, + enum="DatabaseVersion", + ) + requires_db_restart: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py new file mode 100644 index 000000000000..328d356d91e3 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py @@ -0,0 +1,1734 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.alloydb_v1alpha.types import resources + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1alpha", + manifest={ + "ListClustersRequest", + "ListClustersResponse", + "GetClusterRequest", + "CreateSecondaryClusterRequest", + "CreateClusterRequest", + "UpdateClusterRequest", + "DeleteClusterRequest", + "PromoteClusterRequest", + "RestoreClusterRequest", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "CreateSecondaryInstanceRequest", + "CreateInstanceRequests", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "BatchCreateInstancesMetadata", + "BatchCreateInstanceStatus", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "FailoverInstanceRequest", + "RestartInstanceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "CreateBackupRequest", + "UpdateBackupRequest", + "DeleteBackupRequest", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "GenerateClientCertificateRequest", + "GenerateClientCertificateResponse", + "GetConnectionInfoRequest", + "OperationMetadata", + }, +) + + +class ListClustersRequest(proto.Message): + r"""Message for requesting list of Clusters + + Attributes: + parent (str): + Required. The name of the parent resource. For the required + format, see the comment on the Cluster.name field. + Additionally, you can perform an aggregated list operation + by specifying a value with the following format: + + - projects/{project}/locations/- + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListClustersResponse(proto.Message): + r"""Message for response to listing Clusters + + Attributes: + clusters (MutableSequence[google.cloud.alloydb_v1alpha.types.Cluster]): + The list of Cluster + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
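As a usage sketch, here is how the ``ListClustersRequest``/``ListClustersResponse`` pair is typically driven through the generated admin client, whose pager consumes ``next_page_token`` transparently. The client name ``AlloyDBAdminClient`` is assumed to be the service client this package generates, and the project ID is a placeholder::

    from google.cloud import alloydb_v1alpha

    client = alloydb_v1alpha.AlloyDBAdminClient()
    request = alloydb_v1alpha.ListClustersRequest(
        # "-" as the location performs an aggregated list across regions.
        parent="projects/my-project/locations/-",
    )
    for cluster in client.list_clusters(request=request):
        print(cluster.name, cluster.state)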
+ """ + + @property + def raw_page(self): + return self + + clusters: MutableSequence[resources.Cluster] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Cluster, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetClusterRequest(proto.Message): + r"""Message for getting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSecondaryClusterRequest(proto.Message): + r""" + + Attributes: + parent (str): + Required. The name of the parent resource + (the primary cluster). For the required format, + see the comment on the Cluster.name field. + cluster_id (str): + Required. ID of the requesting object (the + secondary cluster). + cluster (google.cloud.alloydb_v1alpha.types.Cluster): + Required. Configuration of the requesting + object (the secondary cluster). + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class CreateClusterRequest(proto.Message): + r"""Message for creating a Cluster + + Attributes: + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Cluster.name field. + cluster_id (str): + Required. ID of the requesting object. + cluster (google.cloud.alloydb_v1alpha.types.Cluster): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateClusterRequest(proto.Message): + r"""Message for updating a Cluster + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + cluster (google.cloud.alloydb_v1alpha.types.Cluster): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + update request. + allow_missing (bool): + Optional. If set to true, update succeeds even if cluster is + not found. In that case, a new cluster is created and + ``update_mask`` is ignored. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteClusterRequest(proto.Message): + r"""Message for deleting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Cluster. + If an etag is provided and does not match the + current etag of the Cluster, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + force (bool): + Optional. Whether to cascade delete child + instances for given cluster. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class PromoteClusterRequest(proto.Message): + r"""Message for promoting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Cluster. + If an etag is provided and does not match the + current etag of the Cluster, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class RestoreClusterRequest(proto.Message): + r"""Message for restoring a Cluster from a backup or another + cluster at a given point in time. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_source (google.cloud.alloydb_v1alpha.types.BackupSource): + Backup source. + + This field is a member of `oneof`_ ``source``. 
+ continuous_backup_source (google.cloud.alloydb_v1alpha.types.ContinuousBackupSource): + ContinuousBackup source. Continuous backup + needs to be enabled in the source cluster for + this operation to succeed. + + This field is a member of `oneof`_ ``source``. + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Cluster.name field. + cluster_id (str): + Required. ID of the requesting object. + cluster (google.cloud.alloydb_v1alpha.types.Cluster): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + import request. + """ + + backup_source: resources.BackupSource = proto.Field( + proto.MESSAGE, + number=4, + oneof="source", + message=resources.BackupSource, + ) + continuous_backup_source: resources.ContinuousBackupSource = proto.Field( + proto.MESSAGE, + number=8, + oneof="source", + message=resources.ContinuousBackupSource, + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListInstancesRequest(proto.Message): + r"""Message for requesting list of Instances + + Attributes: + parent (str): + Required. The name of the parent resource. For the required + format, see the comment on the Instance.name field. + Additionally, you can perform an aggregated list operation + by specifying a value with one of the following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. 
Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInstancesResponse(proto.Message): + r"""Message for response to listing Instances + + Attributes: + instances (MutableSequence[google.cloud.alloydb_v1alpha.types.Instance]): + The list of Instance + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances: MutableSequence[resources.Instance] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Instance, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Message for getting an Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + view (google.cloud.alloydb_v1alpha.types.InstanceView): + The view of the instance to return. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: resources.InstanceView = proto.Field( + proto.ENUM, + number=2, + enum=resources.InstanceView, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Message for creating an Instance + + Attributes: + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Instance.name field. + instance_id (str): + Required. ID of the requesting object. + instance (google.cloud.alloydb_v1alpha.types.Instance): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CreateSecondaryInstanceRequest(proto.Message): + r"""Message for creating a Secondary Instance + + Attributes: + parent (str): + Required. The name of the parent resource.
+ For the required format, see the comment on the + Instance.name field. + instance_id (str): + Required. ID of the requesting object. + instance (google.cloud.alloydb_v1alpha.types.Instance): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CreateInstanceRequests(proto.Message): + r"""See usage below for notes. + + Attributes: + create_instance_requests (MutableSequence[google.cloud.alloydb_v1alpha.types.CreateInstanceRequest]): + Required. Primary and read replica instances + to be created. This list should not be empty. + """ + + create_instance_requests: MutableSequence[ + "CreateInstanceRequest" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CreateInstanceRequest", + ) + + +class BatchCreateInstancesRequest(proto.Message): + r"""Message for creating a batch of instances under the specified + cluster. + + Attributes: + parent (str): + Required. The name of the parent resource. + requests (google.cloud.alloydb_v1alpha.types.CreateInstanceRequests): + Required. Resources being created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
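A sketch of how the ``request_id`` contract described above is typically used: generate one UUID per logical operation and reuse it on retry, so the server can de-duplicate the call. Resource names and the instance ID are placeholders::

    import uuid

    from google.cloud import alloydb_v1alpha

    parent = "projects/my-project/locations/us-central1/clusters/my-cluster"
    request = alloydb_v1alpha.BatchCreateInstancesRequest(
        parent=parent,
        requests=alloydb_v1alpha.CreateInstanceRequests(
            create_instance_requests=[
                alloydb_v1alpha.CreateInstanceRequest(
                    parent=parent,
                    instance_id="read-pool-1",
                    instance=alloydb_v1alpha.Instance(
                        instance_type=alloydb_v1alpha.Instance.InstanceType.READ_POOL,
                    ),
                ),
            ],
        ),
        # Reuse this same ID if the call is retried after a timeout.
        request_id=str(uuid.uuid4()),
    )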
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: "CreateInstanceRequests" = proto.Field( + proto.MESSAGE, + number=2, + message="CreateInstanceRequests", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BatchCreateInstancesResponse(proto.Message): + r"""Message for creating batches of instances in a cluster. + + Attributes: + instances (MutableSequence[google.cloud.alloydb_v1alpha.types.Instance]): + Created instances. + """ + + instances: MutableSequence[resources.Instance] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Instance, + ) + + +class BatchCreateInstancesMetadata(proto.Message): + r"""Message for metadata that is specific to BatchCreateInstances + API. + + Attributes: + instance_targets (MutableSequence[str]): + The instances being created in the API call. + Each string in this list is the server defined + resource path for target instances in the + request and for the format of each string, see + the comment on the Instance.name field. + instance_statuses (MutableMapping[str, google.cloud.alloydb_v1alpha.types.BatchCreateInstanceStatus]): + A map representing state of the instances involved in the + BatchCreateInstances operation during the operation + execution. The instance state will be in STATE_UNSPECIFIED + state if the instance has not yet been picked up for + processing. The key of the map is the name of the instance + resource. For the format, see the comment on the + Instance.name field. + """ + + instance_targets: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + instance_statuses: MutableMapping[ + str, "BatchCreateInstanceStatus" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="BatchCreateInstanceStatus", + ) + + +class BatchCreateInstanceStatus(proto.Message): + r"""Message for current status of an instance in the + BatchCreateInstances operation. For example, lets say a + BatchCreateInstances workflow has 4 instances, Instance1 through + Instance4. Lets also assume that 2 instances succeeded but the third + failed to create and the 4th was never picked up for creation + because of failure of the previous one. Then, resulting states would + look something like: + + 1. Instance1 = ROLLED_BACK + 2. Instance2 = ROLLED_BACK + 3. Instance3 = FAILED + 4. Instance4 = FAILED However, while the operation is running, the + instance might be in other states including PENDING_CREATE, + ACTIVE, DELETING and CREATING. The states / do not get further + updated once the operation is done. + + Attributes: + state (google.cloud.alloydb_v1alpha.types.BatchCreateInstanceStatus.State): + The current state of an instance involved in the batch + create operation. Once the operation is complete, the final + state of the instances in the LRO can be one of: + + 1. ACTIVE, indicating that instances were created + successfully + 2. FAILED, indicating that a particular instance failed + creation + 3. ROLLED_BACK indicating that although the instance was + created successfully, it had to be rolled back and + deleted due to failure in other steps of the workflow. + error_msg (str): + DEPRECATED - Use the error field instead. + Error, if any error occurred and is available, + during instance creation. + error (google.rpc.status_pb2.Status): + The RPC status of the instance creation + operation. This field will be present if an + error happened during the instance creation. 
+ type_ (google.cloud.alloydb_v1alpha.types.Instance.InstanceType): + + """ + + class State(proto.Enum): + r"""State contains all valid instance states for the + BatchCreateInstances operation. This is mainly used for status + reporting through the LRO metadata. + + Values: + STATE_UNSPECIFIED (0): + The state of the instance is unknown. + PENDING_CREATE (1): + Instance is pending creation and has not yet + been picked up for processing in the backend. + READY (2): + The instance is active and running. + CREATING (3): + The instance is being created. + DELETING (4): + The instance is being deleted. + FAILED (5): + The creation of the instance failed or a + fatal error occurred during an operation on the + instance or a batch of instances. + ROLLED_BACK (6): + The instance was created successfully, but + was rolled back and deleted due to some other + failure during the BatchCreateInstances operation. + """ + STATE_UNSPECIFIED = 0 + PENDING_CREATE = 1 + READY = 2 + CREATING = 3 + DELETING = 4 + FAILED = 5 + ROLLED_BACK = 6 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + error_msg: str = proto.Field( + proto.STRING, + number=2, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + type_: resources.Instance.InstanceType = proto.Field( + proto.ENUM, + number=3, + enum=resources.Instance.InstanceType, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""Message for updating an Instance + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Instance resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + instance (google.cloud.alloydb_v1alpha.types.Instance): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + update request. + allow_missing (bool): + Optional. If set to true, update succeeds even if instance + is not found. In that case, a new instance is created and + ``update_mask`` is ignored.
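To make the ``update_mask`` semantics concrete — only the listed paths are overwritten, while an absent mask overwrites every field — a minimal sketch, with the resource name as a placeholder::

    from google.protobuf import field_mask_pb2

    from google.cloud import alloydb_v1alpha

    request = alloydb_v1alpha.UpdateInstanceRequest(
        instance=alloydb_v1alpha.Instance(
            name="projects/my-project/locations/us-central1/clusters/my-cluster/instances/my-instance",
            display_name="renamed-instance",
        ),
        # Only display_name is overwritten; all other fields are untouched.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )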
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Message for deleting a Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Instance. + If an etag is provided and does not match the + current etag of the Instance, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class FailoverInstanceRequest(proto.Message): + r"""Message for triggering failover on an Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + failover. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class RestartInstanceRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + restart. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListBackupsRequest(proto.Message): + r"""Message for requesting list of Backups + + Attributes: + parent (str): + Required. Parent value for ListBackupsRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupsResponse(proto.Message): + r"""Message for response to listing Backups + + Attributes: + backups (MutableSequence[google.cloud.alloydb_v1alpha.types.Backup]): + The list of Backup + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence[resources.Backup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Backup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Message for getting a Backup + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateBackupRequest(proto.Message): + r"""Message for creating a Backup + + Attributes: + parent (str): + Required. Value for parent. + backup_id (str): + Required. ID of the requesting object. 
+        backup (google.cloud.alloydb_v1alpha.types.Backup):
+            Required. The resource being created
+        request_id (str):
+            Optional. An optional request ID to identify
+            requests. Specify a unique request ID so that if
+            you must retry your request, the server will
+            know to ignore the request if it has already
+            been completed. The server will guarantee that
+            for at least 60 minutes since the first request.
+            For example, consider a situation where you make
+            an initial request and the request times out. If
+            you make the request again with the same request
+            ID, the server can check if the original
+            operation with the same request ID was received,
+            and if so, will ignore the second request. This
+            prevents clients from accidentally creating
+            duplicate commitments.
+            The request ID must be a valid UUID with the
+            exception that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+        validate_only (bool):
+            Optional. If set, the backend validates the
+            request, but doesn't actually execute it.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    backup_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    backup: resources.Backup = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=resources.Backup,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+
+
+class UpdateBackupRequest(proto.Message):
+    r"""Message for updating a Backup
+
+    Attributes:
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. Field mask is used to specify the fields to be
+            overwritten in the Backup resource by the update. The
+            fields specified in the update_mask are relative to the
+            resource, not the full request. A field will be overwritten
+            if it is in the mask. If the user does not provide a mask
+            then all fields will be overwritten.
+        backup (google.cloud.alloydb_v1alpha.types.Backup):
+            Required. The resource being updated
+        request_id (str):
+            Optional. An optional request ID to identify
+            requests. Specify a unique request ID so that if
+            you must retry your request, the server will
+            know to ignore the request if it has already
+            been completed. The server will guarantee that
+            for at least 60 minutes since the first request.
+            For example, consider a situation where you make
+            an initial request and the request times out. If
+            you make the request again with the same request
+            ID, the server can check if the original
+            operation with the same request ID was received,
+            and if so, will ignore the second request. This
+            prevents clients from accidentally creating
+            duplicate commitments.
+            The request ID must be a valid UUID with the
+            exception that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+        validate_only (bool):
+            Optional. If set, the backend validates the
+            request, but doesn't actually execute it.
+        allow_missing (bool):
+            Optional. If set to true, update succeeds even if the
+            backup is not found. In that case, a new backup is created
+            and ``update_mask`` is ignored.
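+
+    Example:
+        A minimal sketch (the backup name is hypothetical; ``labels``
+        is chosen only to illustrate the mask)::
+
+            from google.cloud import alloydb_v1alpha
+            from google.protobuf import field_mask_pb2
+
+            request = alloydb_v1alpha.UpdateBackupRequest(
+                backup=alloydb_v1alpha.Backup(
+                    name="projects/my-project/locations/us-central1/backups/my-backup",
+                    labels={"env": "test"},
+                ),
+                update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+            )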
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: resources.Backup = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Backup, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Message for deleting a Backup + + Attributes: + name (str): + Required. Name of the resource. For the + required format, see the comment on the + Backup.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, the backend validates the + request, but doesn't actually execute it. + etag (str): + Optional. The current etag of the Backup. + If an etag is provided and does not match the + current etag of the Backup, deletion will be + blocked and an ABORTED error will be returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSupportedDatabaseFlagsRequest(proto.Message): + r"""Message for listing the information about the supported + Database flags. + + Attributes: + parent (str): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location} + + Regardless of the parent specified here, as long it is + contains a valid project and location, the service will + return a static list of supported flags resources. Note that + we do not yet support region-specific flags. + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSupportedDatabaseFlagsResponse(proto.Message): + r"""Message for response to listing SupportedDatabaseFlags. + + Attributes: + supported_database_flags (MutableSequence[google.cloud.alloydb_v1alpha.types.SupportedDatabaseFlag]): + The list of SupportedDatabaseFlags. + next_page_token (str): + A token identifying a page of results the + server should return. 
+ """ + + @property + def raw_page(self): + return self + + supported_database_flags: MutableSequence[ + resources.SupportedDatabaseFlag + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.SupportedDatabaseFlag, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GenerateClientCertificateRequest(proto.Message): + r"""Message for requests to generate a client certificate signed + by the Cluster CA. + + Attributes: + parent (str): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location}/clusters/{cluster} + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + pem_csr (str): + Optional. A pem-encoded X.509 certificate + signing request (CSR). + cert_duration (google.protobuf.duration_pb2.Duration): + Optional. An optional hint to the endpoint to + generate the client certificate with the + requested duration. The duration can be from 1 + hour to 24 hours. The endpoint may or may not + honor the hint. If the hint is left unspecified + or is not honored, then the endpoint will pick + an appropriate default duration. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + pem_csr: str = proto.Field( + proto.STRING, + number=3, + ) + cert_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + +class GenerateClientCertificateResponse(proto.Message): + r"""Message returned by a GenerateClientCertificate operation. + + Attributes: + pem_certificate (str): + Output only. The pem-encoded, signed X.509 + certificate. + pem_certificate_chain (MutableSequence[str]): + Output only. The pem-encoded chain that may + be used to verify the X.509 certificate. + Expected to be in issuer-to-root order according + to RFC 5246. + """ + + pem_certificate: str = proto.Field( + proto.STRING, + number=1, + ) + pem_certificate_chain: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class GetConnectionInfoRequest(proto.Message): + r"""Request message for GetConnectionInfo. + + Attributes: + parent (str): + Required. The name of the parent resource. + The required format is: + projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + batch_create_instances_metadata (google.cloud.alloydb_v1alpha.types.BatchCreateInstancesMetadata): + Output only. BatchCreateInstances related + metadata. + + This field is a member of `oneof`_ ``request_specific``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + batch_create_instances_metadata: "BatchCreateInstancesMetadata" = proto.Field( + proto.MESSAGE, + number=8, + oneof="request_specific", + message="BatchCreateInstancesMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py new file mode 100644 index 000000000000..0e95ae982ae2 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.alloydb_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.alloy_db_admin import AlloyDBAdminAsyncClient, AlloyDBAdminClient +from .types.resources import ( + AutomatedBackupPolicy, + Backup, + BackupSource, + Cluster, + ConnectionInfo, + ContinuousBackupConfig, + ContinuousBackupInfo, + ContinuousBackupSource, + DatabaseVersion, + EncryptionConfig, + EncryptionInfo, + Instance, + InstanceView, + MigrationSource, + SslConfig, + SupportedDatabaseFlag, + UserPassword, +) +from .types.service import ( + BatchCreateInstancesMetadata, + BatchCreateInstancesRequest, + BatchCreateInstancesResponse, + BatchCreateInstanceStatus, + CreateBackupRequest, + CreateClusterRequest, + CreateInstanceRequest, + CreateInstanceRequests, + CreateSecondaryClusterRequest, + CreateSecondaryInstanceRequest, + DeleteBackupRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + FailoverInstanceRequest, + GenerateClientCertificateRequest, + GenerateClientCertificateResponse, + GetBackupRequest, + GetClusterRequest, + GetConnectionInfoRequest, + GetInstanceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListClustersRequest, + ListClustersResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSupportedDatabaseFlagsRequest, + ListSupportedDatabaseFlagsResponse, + OperationMetadata, + PromoteClusterRequest, + RestartInstanceRequest, + RestoreClusterRequest, + UpdateBackupRequest, + UpdateClusterRequest, + UpdateInstanceRequest, +) + +__all__ = ( + "AlloyDBAdminAsyncClient", + "AlloyDBAdminClient", + "AutomatedBackupPolicy", + "Backup", + "BackupSource", + "BatchCreateInstanceStatus", + "BatchCreateInstancesMetadata", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "Cluster", + "ConnectionInfo", + "ContinuousBackupConfig", + "ContinuousBackupInfo", + "ContinuousBackupSource", + "CreateBackupRequest", + "CreateClusterRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "CreateSecondaryClusterRequest", + "CreateSecondaryInstanceRequest", + "DatabaseVersion", + "DeleteBackupRequest", + "DeleteClusterRequest", + "DeleteInstanceRequest", + "EncryptionConfig", + "EncryptionInfo", + "FailoverInstanceRequest", + "GenerateClientCertificateRequest", + "GenerateClientCertificateResponse", + "GetBackupRequest", + "GetClusterRequest", + "GetConnectionInfoRequest", + "GetInstanceRequest", + "Instance", + "InstanceView", + "ListBackupsRequest", + "ListBackupsResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "MigrationSource", + "OperationMetadata", + "PromoteClusterRequest", + "RestartInstanceRequest", + "RestoreClusterRequest", + "SslConfig", + "SupportedDatabaseFlag", + "UpdateBackupRequest", + "UpdateClusterRequest", + "UpdateInstanceRequest", + "UserPassword", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..52fdd0d7ad94 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json @@ -0,0 +1,403 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": 
"google.cloud.alloydb_v1beta", + "protoPackage": "google.cloud.alloydb.v1beta", + "schema": "1.0", + "services": { + "AlloyDBAdmin": { + "clients": { + "grpc": { + "libraryClient": "AlloyDBAdminClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateSecondaryCluster": { + "methods": [ + "create_secondary_cluster" + ] + }, + "CreateSecondaryInstance": { + "methods": [ + "create_secondary_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GenerateClientCertificate": { + "methods": [ + "generate_client_certificate" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConnectionInfo": { + "methods": [ + "get_connection_info" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "PromoteCluster": { + "methods": [ + "promote_cluster" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AlloyDBAdminAsyncClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateSecondaryCluster": { + "methods": [ + "create_secondary_cluster" + ] + }, + "CreateSecondaryInstance": { + "methods": [ + "create_secondary_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GenerateClientCertificate": { + "methods": [ + "generate_client_certificate" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConnectionInfo": { + "methods": [ + "get_connection_info" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "PromoteCluster": { + "methods": [ + "promote_cluster" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": 
[ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "rest": { + "libraryClient": "AlloyDBAdminClient", + "rpcs": { + "BatchCreateInstances": { + "methods": [ + "batch_create_instances" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateSecondaryCluster": { + "methods": [ + "create_secondary_cluster" + ] + }, + "CreateSecondaryInstance": { + "methods": [ + "create_secondary_instance" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GenerateClientCertificate": { + "methods": [ + "generate_client_certificate" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetConnectionInfo": { + "methods": [ + "get_connection_info" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSupportedDatabaseFlags": { + "methods": [ + "list_supported_database_flags" + ] + }, + "PromoteCluster": { + "methods": [ + "promote_cluster" + ] + }, + "RestartInstance": { + "methods": [ + "restart_instance" + ] + }, + "RestoreCluster": { + "methods": [ + "restore_cluster" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py new file mode 100644 index 000000000000..1b461870ca63 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/py.typed b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/py.typed new file mode 100644 index 000000000000..c71d0f20b482 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-alloydb package uses inline types. 
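The ``gapic_metadata.json`` above maps every proto RPC to the identical snake_case method on the sync (gRPC), async (gRPC AsyncIO), and REST clients, so callers can swap transports without changing call sites. A minimal sketch of that equivalence (default endpoint and ambient credentials assumed):

.. code-block:: python

    from google.cloud import alloydb_v1beta

    # The sync client accepts either transport; the method surface is
    # identical, as recorded in gapic_metadata.json.
    grpc_client = alloydb_v1beta.AlloyDBAdminClient(transport="grpc")
    rest_client = alloydb_v1beta.AlloyDBAdminClient(transport="rest")

    # The async client always uses the grpc_asyncio transport.
    async_client = alloydb_v1beta.AlloyDBAdminAsyncClient()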
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/__init__.py new file mode 100644 index 000000000000..1acb07113321 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AlloyDBAdminAsyncClient +from .client import AlloyDBAdminClient + +__all__ = ( + "AlloyDBAdminClient", + "AlloyDBAdminAsyncClient", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py new file mode 100644 index 000000000000..7cf927417c88 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py @@ -0,0 +1,3722 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.alloydb_v1beta.services.alloy_db_admin import pagers +from google.cloud.alloydb_v1beta.types import resources, service + +from .client import AlloyDBAdminClient +from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport +from .transports.grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport + + +class AlloyDBAdminAsyncClient: + """Service describing handlers for resources""" + + _client: AlloyDBAdminClient + + DEFAULT_ENDPOINT = AlloyDBAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AlloyDBAdminClient.DEFAULT_MTLS_ENDPOINT + + backup_path = staticmethod(AlloyDBAdminClient.backup_path) + parse_backup_path = staticmethod(AlloyDBAdminClient.parse_backup_path) + cluster_path = staticmethod(AlloyDBAdminClient.cluster_path) + parse_cluster_path = staticmethod(AlloyDBAdminClient.parse_cluster_path) + connection_info_path = staticmethod(AlloyDBAdminClient.connection_info_path) + parse_connection_info_path = staticmethod( + AlloyDBAdminClient.parse_connection_info_path + ) + crypto_key_version_path = staticmethod(AlloyDBAdminClient.crypto_key_version_path) + parse_crypto_key_version_path = staticmethod( + AlloyDBAdminClient.parse_crypto_key_version_path + ) + instance_path = staticmethod(AlloyDBAdminClient.instance_path) + parse_instance_path = staticmethod(AlloyDBAdminClient.parse_instance_path) + network_path = staticmethod(AlloyDBAdminClient.network_path) + parse_network_path = staticmethod(AlloyDBAdminClient.parse_network_path) + supported_database_flag_path = staticmethod( + AlloyDBAdminClient.supported_database_flag_path + ) + parse_supported_database_flag_path = staticmethod( + AlloyDBAdminClient.parse_supported_database_flag_path + ) + common_billing_account_path = staticmethod( + AlloyDBAdminClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AlloyDBAdminClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AlloyDBAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(AlloyDBAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(AlloyDBAdminClient.common_organization_path) + parse_common_organization_path = staticmethod( + 
+        AlloyDBAdminClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(AlloyDBAdminClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        AlloyDBAdminClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(AlloyDBAdminClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        AlloyDBAdminClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            AlloyDBAdminAsyncClient: The constructed client.
+        """
+        return AlloyDBAdminClient.from_service_account_info.__func__(AlloyDBAdminAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            AlloyDBAdminAsyncClient: The constructed client.
+        """
+        return AlloyDBAdminClient.from_service_account_file.__func__(AlloyDBAdminAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+            client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return AlloyDBAdminClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> AlloyDBAdminTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            AlloyDBAdminTransport: The transport used by the client instance.
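+
+        For example (illustrative only; the async client always uses the
+        gRPC AsyncIO transport)::
+
+            assert type(client.transport).__name__ == "AlloyDBAdminGrpcAsyncIOTransport"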
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(AlloyDBAdminClient).get_transport_class, type(AlloyDBAdminClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AlloyDBAdminTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the alloy db admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AlloyDBAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AlloyDBAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_clusters( + self, + request: Optional[Union[service.ListClustersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersAsyncPager: + r"""Lists Clusters in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_list_clusters(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.ListClustersRequest, dict]]): + The request object. 
Message for requesting list of + Clusters + parent (:class:`str`): + Required. The name of the parent resource. For the + required format, see the comment on the Cluster.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with the following + format: + + - projects/{project}/locations/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListClustersAsyncPager: + Message for response to listing + Clusters + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListClustersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_cluster( + self, + request: Optional[Union[service.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Gets details of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_get_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.GetClusterRequest, dict]]): + The request object. Message for getting a Cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cluster( + self, + request: Optional[Union[service.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_create_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.CreateClusterRequest, dict]]): + The request object. Message for creating a Cluster + parent (:class:`str`): + Required. The name of the parent + resource. For the required format, see + the comment on the Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.alloydb_v1beta.types.Cluster`): + Required. The resource being created + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
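+        # The routing header below is serialized into the standard
+        # "x-goog-request-params" metadata key, which lets the service
+        # route the call to the right project/location.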
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_cluster( + self, + request: Optional[Union[service.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_update_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.UpdateClusterRequest, dict]]): + The request object. Message for updating a Cluster + cluster (:class:`google.cloud.alloydb_v1beta.types.Cluster`): + Required. The resource being updated + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. 
+ All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_cluster( + self, + request: Optional[Union[service.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_delete_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.DeleteClusterRequest, dict]]): + The request object. Message for deleting a Cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def promote_cluster( + self, + request: Optional[Union[service.PromoteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_promote_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.PromoteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.promote_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.PromoteClusterRequest, dict]]): + The request object. Message for promoting a Cluster + name (:class:`str`): + Required. The name of the resource. 
+ For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.PromoteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.promote_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def restore_cluster( + self, + request: Optional[Union[service.RestoreClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_restore_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup_source = alloydb_v1beta.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.RestoreClusterRequest, dict]]): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + request = service.RestoreClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restore_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_secondary_cluster( + self, + request: Optional[Union[service.CreateSecondaryClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_create_secondary_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_secondary_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.CreateSecondaryClusterRequest, dict]]): + The request object. + parent (:class:`str`): + Required. The name of the parent + resource (the primary cluster). For the + required format, see the comment on the + Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.alloydb_v1beta.types.Cluster`): + Required. Configuration of the + requesting object (the secondary + cluster). + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. ID of the requesting object + (the secondary cluster). + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateSecondaryClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
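+        # ``default_timeout=None`` below means no client-side default deadline
+        # is applied to this long-running mutation; an explicit per-call
+        # ``timeout`` argument is still honored by the wrapper.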
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_secondary_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_instances( + self, + request: Optional[Union[service.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_list_instances(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.ListInstancesRequest, dict]]): + The request object. Message for requesting list of + Instances + parent (:class:`str`): + Required. The name of the parent resource. For the + required format, see the comment on the Instance.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with one of the + following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListInstancesAsyncPager: + Message for response to listing + Instances + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance( + self, + request: Optional[Union[service.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Instance: + r"""Gets details of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_get_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.GetInstanceRequest, dict]]): + The request object. Message for getting a Instance + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.Instance: + An Instance is a computing unit that + an end customer can connect to. It's the + main unit of computing resources in + AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
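+        # The flattened ``name`` argument and a full ``request`` object are
+        # mutually exclusive. Either call style works (the resource name shown
+        # is hypothetical):
+        #   await client.get_instance(name="projects/p/locations/l/clusters/c/instances/i")
+        #   await client.get_instance(request=alloydb_v1beta.GetInstanceRequest(name=...))
+        # but supplying both raises the ValueError below.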
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_instance( + self, + request: Optional[Union[service.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Instance in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_create_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.CreateInstanceRequest, dict]]): + The request object. Message for creating a Instance + parent (:class:`str`): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.alloydb_v1beta.types.Instance`): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. ID of the requesting + object. 
+ + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_secondary_instance( + self, + request: Optional[Union[service.CreateSecondaryInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new SECONDARY Instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_create_secondary_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.CreateSecondaryInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_secondary_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.CreateSecondaryInstanceRequest, dict]]): + The request object. Message for creating a Secondary + Instance + parent (:class:`str`): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.alloydb_v1beta.types.Instance`): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateSecondaryInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_secondary_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
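+        # Awaiting the wrapped RPC yields the raw google.longrunning Operation
+        # proto; it is wrapped into an AsyncOperation future further below so
+        # callers can await the eventual ``resources.Instance`` result.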
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def batch_create_instances( + self, + request: Optional[Union[service.BatchCreateInstancesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates new instances under the given project, + location and cluster. There can be only one primary + instance in a cluster. If the primary instance exists in + the cluster as well as this request, then API will throw + an error. + The primary instance should exist before any read pool + instance is created. If the primary instance is a part + of the request payload, then the API will take care of + creating instances in the correct order. This method is + here to support Google-internal use cases, and is not + meant for external customers to consume. Please do not + start relying on it; its behavior is subject to change + without notice. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_batch_create_instances(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + requests = alloydb_v1beta.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.BatchCreateInstancesRequest, dict]]): + The request object. Message for creating a batch of + instances under the specified cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1beta.types.BatchCreateInstancesResponse` + Message for creating batches of instances in a cluster. + + """ + # Create or coerce a protobuf request object. + request = service.BatchCreateInstancesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
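+        # Unlike the read-only methods (e.g. ``list_instances``), this mutating
+        # RPC carries no default retry policy, presumably because replaying a
+        # partially applied batch create is not safely idempotent; only an
+        # explicitly supplied ``retry`` argument is applied.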
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_instances, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + service.BatchCreateInstancesResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_instance( + self, + request: Optional[Union[service.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[resources.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_update_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.UpdateInstanceRequest, dict]]): + The request object. Message for updating a Instance + instance (:class:`google.cloud.alloydb_v1beta.types.Instance`): + Required. The resource being updated + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the Instance resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance( + self, + request: Optional[Union[service.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_delete_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.DeleteInstanceRequest, dict]]): + The request object. Message for deleting a Instance + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def failover_instance( + self, + request: Optional[Union[service.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            async def sample_failover_instance():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1beta.FailoverInstanceRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.failover_instance(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1beta.types.FailoverInstanceRequest, dict]]):
+                The request object. Message for triggering failover on an
+                Instance
+            name (:class:`str`):
+                Required. The name of the resource.
+                For the required format, see the comment
+                on the Instance.name field.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to.
+                   It's the main unit of computing resources in AlloyDB.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.FailoverInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.failover_instance,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            resources.Instance,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def restart_instance(
+        self,
+        request: Optional[Union[service.RestartInstanceRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Restart an Instance in a cluster.
+        Imperative only.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            async def sample_restart_instance():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1beta.RestartInstanceRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.restart_instance(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1beta.types.RestartInstanceRequest, dict]]):
+                The request object.
+            name (:class:`str`):
+                Required. The name of the resource.
+                For the required format, see the comment
+                on the Instance.name field.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to.
+                   It's the main unit of computing resources in AlloyDB.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.RestartInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.restart_instance,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            resources.Instance,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
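+        # The AsyncOperation returned here resolves to ``resources.Instance``
+        # once the restart completes; the docstring sample above waits on it
+        # via ``(await operation).result()``.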
+ return response + + async def list_backups( + self, + request: Optional[Union[service.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_list_backups(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.ListBackupsRequest, dict]]): + The request object. Message for requesting list of + Backups + parent (:class:`str`): + Required. Parent value for + ListBackupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListBackupsAsyncPager: + Message for response to listing + Backups + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
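+        # The pager constructed below fetches pages lazily: iterating it with
+        # ``async for backup in pager`` issues further ListBackups calls only
+        # as each page is exhausted.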
+ response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: Optional[Union[service.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Gets details of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_get_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.GetBackupRequest, dict]]): + The request object. Message for getting a Backup + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.Backup: + Message describing Backup object + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
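+        # The value returned below is a proto-plus ``resources.Backup``
+        # message, so fields such as ``response.name`` and
+        # ``response.cluster_name`` are accessed as plain attributes.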
+        return response
+
+    async def create_backup(
+        self,
+        request: Optional[Union[service.CreateBackupRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        backup: Optional[resources.Backup] = None,
+        backup_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Creates a new Backup in a given project and location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            async def sample_create_backup():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                backup = alloydb_v1beta.Backup()
+                backup.cluster_name = "cluster_name_value"
+
+                request = alloydb_v1beta.CreateBackupRequest(
+                    parent="parent_value",
+                    backup_id="backup_id_value",
+                    backup=backup,
+                )
+
+                # Make the request
+                operation = await client.create_backup(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1beta.types.CreateBackupRequest, dict]]):
+                The request object. Message for creating a Backup.
+            parent (:class:`str`):
+                Required. Value for parent.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            backup (:class:`google.cloud.alloydb_v1beta.types.Backup`):
+                Required. The resource being created.
+                This corresponds to the ``backup`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            backup_id (:class:`str`):
+                Required. ID of the requesting
+                object.
+
+                This corresponds to the ``backup_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.alloydb_v1beta.types.Backup`,
+                the message describing the Backup object.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, backup, backup_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.CreateBackupRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
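+        # A sketch of the equivalent flattened-argument call (illustrative
+        # values only; the request-object and flattened forms are mutually
+        # exclusive, as enforced above):
+        #
+        #   operation = await client.create_backup(
+        #       parent="projects/my-project/locations/us-central1",
+        #       backup=resources.Backup(cluster_name="..."),
+        #       backup_id="my-backup",
+        #   )
+        #   backup = await operation.result()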
+        if parent is not None:
+            request.parent = parent
+        if backup is not None:
+            request.backup = backup
+        if backup_id is not None:
+            request.backup_id = backup_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_backup,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            resources.Backup,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_backup(
+        self,
+        request: Optional[Union[service.UpdateBackupRequest, dict]] = None,
+        *,
+        backup: Optional[resources.Backup] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Updates the parameters of a single Backup.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            async def sample_update_backup():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                backup = alloydb_v1beta.Backup()
+                backup.cluster_name = "cluster_name_value"
+
+                request = alloydb_v1beta.UpdateBackupRequest(
+                    backup=backup,
+                )
+
+                # Make the request
+                operation = await client.update_backup(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1beta.types.UpdateBackupRequest, dict]]):
+                The request object. Message for updating a Backup.
+            backup (:class:`google.cloud.alloydb_v1beta.types.Backup`):
+                Required. The resource being updated.
+                This corresponds to the ``backup`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Optional. Field mask is used to specify the fields to be
+                overwritten in the Backup resource by the update. The
+                fields specified in the update_mask are relative to the
+                resource, not the full request. A field will be
+                overwritten if it is in the mask. If the user does not
+                provide a mask, then all fields will be overwritten.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.alloydb_v1beta.types.Backup`,
+                the message describing the Backup object.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([backup, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.UpdateBackupRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if backup is not None:
+            request.backup = backup
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_backup,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("backup.name", request.backup.name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            resources.Backup,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_backup(
+        self,
+        request: Optional[Union[service.DeleteBackupRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Deletes a single Backup.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            async def sample_delete_backup():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1beta.DeleteBackupRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = await client.delete_backup(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1beta.types.DeleteBackupRequest, dict]]):
+                The request object. Message for deleting a Backup.
+            name (:class:`str`):
+                Required. Name of the resource. For
+                the required format, see the comment on
+                the Backup.name field.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.protobuf.empty_pb2.Empty`: a generic empty message
+                that you can re-use to avoid defining duplicated empty messages
+                in your APIs. A typical example is to use it as the request or
+                the response type of an API method. For instance::
+
+                    service Foo {
+                      rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+                    }
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.DeleteBackupRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_backup,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_supported_database_flags(
+        self,
+        request: Optional[
+            Union[service.ListSupportedDatabaseFlagsRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListSupportedDatabaseFlagsAsyncPager:
+        r"""Lists SupportedDatabaseFlags for a given project and
+        location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            async def sample_list_supported_database_flags():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1beta.ListSupportedDatabaseFlagsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request (the async call must be awaited to get the pager)
+                page_result = await client.list_supported_database_flags(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsRequest, dict]]):
+                The request object. Message for listing the information
+                about the supported Database flags.
+            parent (:class:`str`):
+                Required. The name of the parent resource. The required
+                format is:
+
+                - projects/{project}/locations/{location}
+
+                Regardless of the parent specified here, as long as it
+                contains a valid project and location, the service will
+                return a static list of supported flag resources. Note
+                that we do not yet support region-specific flags.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsAsyncPager:
+                Message for response to listing
+                SupportedDatabaseFlags.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.ListSupportedDatabaseFlagsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_supported_database_flags,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListSupportedDatabaseFlagsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
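+        # As with list_backups, iterating the returned pager with
+        # ``async for`` yields SupportedDatabaseFlag items and fetches
+        # additional pages on demand.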
+ return response + + async def generate_client_certificate( + self, + request: Optional[Union[service.GenerateClientCertificateRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GenerateClientCertificateResponse: + r"""Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.generate_client_certificate(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.GenerateClientCertificateRequest, dict]]): + The request object. Message for requests to generate a + client certificate signed by the Cluster + CA. + parent (:class:`str`): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location}/clusters/{cluster} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.GenerateClientCertificateResponse: + Message returned by a + GenerateClientCertificate operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GenerateClientCertificateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
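+        # The default retry attached below retries transient
+        # ServiceUnavailable errors with exponential backoff: the delay
+        # starts at 1.0s and is multiplied by 1.3 after each attempt, capped
+        # at 60.0s, with an overall 60.0s deadline and a 60.0s default
+        # timeout.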
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_client_certificate, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_connection_info( + self, + request: Optional[Union[service.GetConnectionInfoRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConnectionInfo: + r"""Get instance metadata used for a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_get_connection_info(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_connection_info(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.GetConnectionInfoRequest, dict]]): + The request object. Request message for + GetConnectionInfo. + parent (:class:`str`): + Required. The name of the parent + resource. The required format is: + projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.ConnectionInfo: + ConnectionInfo singleton resource. + https://google.aip.dev/156 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetConnectionInfoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
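+        # Callers can override these wrapped defaults per call; a hedged
+        # sketch (illustrative resource name):
+        #
+        #   info = await client.get_connection_info(
+        #       parent="projects/p/locations/l/clusters/c/instances/i",
+        #       timeout=30.0,
+        #   )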
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_connection_info, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
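+        # Both request forms are accepted here, e.g. (illustrative
+        # operation name):
+        #
+        #   await client.get_operation({"name": "projects/p/locations/l/operations/op-123"})
+        #   await client.get_operation(operations_pb2.GetOperationRequest(name="..."))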
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
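+        # Cancellation is best-effort; the dict form works here too, e.g.
+        # (illustrative name):
+        #
+        #   await client.cancel_operation({"name": "projects/p/locations/l/operations/op-123"})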
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
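+        # Note that no default retry or timeout is attached here
+        # (``default_timeout=None``); pass ``retry``/``timeout`` explicitly
+        # if the call needs a client-side deadline.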
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AlloyDBAdminAsyncClient",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py new file mode 100644 index 000000000000..7b5167fc1754 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py @@ -0,0 +1,4004 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.alloydb_v1beta.services.alloy_db_admin import pagers +from google.cloud.alloydb_v1beta.types import resources, service + +from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport +from .transports.grpc import AlloyDBAdminGrpcTransport 
+from .transports.grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport
+from .transports.rest import AlloyDBAdminRestTransport
+
+
+class AlloyDBAdminClientMeta(type):
+    """Metaclass for the AlloyDBAdmin client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[AlloyDBAdminTransport]]
+    _transport_registry["grpc"] = AlloyDBAdminGrpcTransport
+    _transport_registry["grpc_asyncio"] = AlloyDBAdminGrpcAsyncIOTransport
+    _transport_registry["rest"] = AlloyDBAdminRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[AlloyDBAdminTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class AlloyDBAdminClient(metaclass=AlloyDBAdminClientMeta):
+    """Service describing handlers for resources"""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "alloydb.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            AlloyDBAdminClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            AlloyDBAdminClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AlloyDBAdminTransport: + """Returns the transport used by the client instance. + + Returns: + AlloyDBAdminTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def backup_path( + project: str, + location: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backups/{backup}".format( + project=project, + location=location, + backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def cluster_path( + project: str, + location: str, + cluster: str, + ) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def connection_info_path( + project: str, + location: str, + cluster: str, + instance: str, + ) -> str: + """Returns a fully-qualified connection_info string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}/connectionInfo".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + + @staticmethod + def parse_connection_info_path(path: str) -> Dict[str, str]: + """Parses a connection_info path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/instances/(?P.+?)/connectionInfo$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_version_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + crypto_key_version: str, + ) -> str: + """Returns a fully-qualified crypto_key_version string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + crypto_key_version=crypto_key_version, + ) + + @staticmethod + def parse_crypto_key_version_path(path: str) -> Dict[str, str]: + """Parses a crypto_key_version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)/cryptoKeyVersions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_path( + project: str, + location: str, + cluster: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + 
r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def supported_database_flag_path( + project: str, + location: str, + flag: str, + ) -> str: + """Returns a fully-qualified supported_database_flag string.""" + return "projects/{project}/locations/{location}/flags/{flag}".format( + project=project, + location=location, + flag=flag, + ) + + @staticmethod + def parse_supported_database_flag_path(path: str) -> Dict[str, str]: + """Parses a supported_database_flag path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/flags/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, AlloyDBAdminTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the alloy db admin client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, AlloyDBAdminTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client.
It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AlloyDBAdminTransport): + # transport is a AlloyDBAdminTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_clusters( + self, + request: Optional[Union[service.ListClustersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersPager: + r"""Lists Clusters in a given project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_list_clusters(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.ListClustersRequest, dict]): + The request object. Message for requesting list of + Clusters + parent (str): + Required. The name of the parent resource. For the + required format, see the comment on the Cluster.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with the following + format: + + - projects/{project}/locations/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListClustersPager: + Message for response to listing + Clusters + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListClustersRequest): + request = service.ListClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListClustersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
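+ # Editorial sketch (illustrative, not generated code): the flattened
+ # `parent` argument is equivalent to building the request message, and
+ # the pager fetches additional pages transparently as it is iterated:
+ #
+ #     pager = client.list_clusters(
+ #         parent="projects/my-project/locations/us-central1"
+ #     )
+ #     for cluster in pager:
+ #         print(cluster.name)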
+ return response + + def get_cluster( + self, + request: Optional[Union[service.GetClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Gets details of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_get_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.GetClusterRequest, dict]): + The request object. Message for getting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetClusterRequest): + request = service.GetClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
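+ # Editorial sketch (illustrative values, not generated code): the
+ # flattened form is equivalent to passing a request message:
+ #
+ #     cluster = client.get_cluster(
+ #         name="projects/my-project/locations/us-central1/clusters/my-cluster"
+ #     )
+ #
+ # A nonexistent resource surfaces as google.api_core.exceptions.NotFound
+ # raised from the wrapped rpc.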
+ return response + + def create_cluster( + self, + request: Optional[Union[service.CreateClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_create_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.CreateClusterRequest, dict]): + The request object. Message for creating a Cluster + parent (str): + Required. The name of the parent + resource. For the required format, see + the comment on the Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.alloydb_v1beta.types.Cluster): + Required. The resource being created + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateClusterRequest): + request = service.CreateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_cluster( + self, + request: Optional[Union[service.UpdateClusterRequest, dict]] = None, + *, + cluster: Optional[resources.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_update_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.UpdateClusterRequest, dict]): + The request object. Message for updating a Cluster + cluster (google.cloud.alloydb_v1beta.types.Cluster): + Required. The resource being updated + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateClusterRequest): + request = service.UpdateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cluster.name", request.cluster.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cluster( + self, + request: Optional[Union[service.DeleteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_delete_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.DeleteClusterRequest, dict]): + The request object. Message for deleting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteClusterRequest): + request = service.DeleteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
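+ # Editorial sketch (illustrative; `cluster_name` is a hypothetical
+ # variable): deletion is a long-running operation whose terminal result
+ # is Empty, so callers typically just block until it finishes:
+ #
+ #     operation = client.delete_cluster(name=cluster_name)
+ #     operation.result(timeout=600)  # raises on failure or timeout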
+ return response + + def promote_cluster( + self, + request: Optional[Union[service.PromoteClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_promote_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.PromoteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.promote_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.PromoteClusterRequest, dict]): + The request object. Message for promoting a Cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.PromoteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.PromoteClusterRequest): + request = service.PromoteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
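+ # Editorial note: `_wrapped_methods` is populated by the transport at
+ # construction time; each entry wraps the bare stub method with the
+ # service's default retry/timeout policy, so the per-call `retry` and
+ # `timeout` arguments below only override those defaults.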
+ rpc = self._transport._wrapped_methods[self._transport.promote_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def restore_cluster( + self, + request: Optional[Union[service.RestoreClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_restore_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + backup_source = alloydb_v1beta.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.RestoreClusterRequest, dict]): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.RestoreClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
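+ # Editorial sketch (illustrative values): because the constructor below
+ # accepts any compatible mapping, a plain dict also works as the request:
+ #
+ #     client.restore_cluster(request={
+ #         "parent": "projects/my-project/locations/us-central1",
+ #         "cluster_id": "restored-cluster",
+ #         "cluster": {"network": "projects/my-project/global/networks/default"},
+ #         "backup_source": {"backup_name": "projects/my-project/locations/us-central1/backups/my-backup"},
+ #     })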
+ if not isinstance(request, service.RestoreClusterRequest): + request = service.RestoreClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_secondary_cluster( + self, + request: Optional[Union[service.CreateSecondaryClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cluster: Optional[resources.Cluster] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_create_secondary_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_secondary_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.CreateSecondaryClusterRequest, dict]): + The request object. + parent (str): + Required. The name of the parent + resource (the primary cluster). For the + required format, see the comment on the + Cluster.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.alloydb_v1beta.types.Cluster): + Required. Configuration of the + requesting object (the secondary + cluster). + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. ID of the requesting object + (the secondary cluster). + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cluster, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateSecondaryClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateSecondaryClusterRequest): + request = service.CreateSecondaryClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cluster is not None: + request.cluster = cluster + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_secondary_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_instances( + self, + request: Optional[Union[service.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_list_instances(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.ListInstancesRequest, dict]): + The request object. Message for requesting list of + Instances + parent (str): + Required. The name of the parent resource. For the + required format, see the comment on the Instance.name + field. Additionally, you can perform an aggregated list + operation by specifying a value with one of the + following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListInstancesPager: + Message for response to listing + Instances + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListInstancesRequest): + request = service.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
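+ # Editorial sketch (illustrative): the `-` wildcard documented above
+ # enables an aggregated listing across every cluster in a region:
+ #
+ #     pager = client.list_instances(
+ #         parent="projects/my-project/locations/us-central1/clusters/-"
+ #     )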
+ return response + + def get_instance( + self, + request: Optional[Union[service.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Instance: + r"""Gets details of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_get_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.GetInstanceRequest, dict]): + The request object. Message for getting a Instance + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.Instance: + An Instance is a computing unit that + an end customer can connect to. It's the + main unit of computing resources in + AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetInstanceRequest): + request = service.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
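+ # Editorial sketch (hypothetical policy values): per-call overrides are
+ # threaded straight through to the wrapped method:
+ #
+ #     from google.api_core import retry as retries
+ #     instance = client.get_instance(
+ #         name=instance_name,
+ #         retry=retries.Retry(deadline=60.0),
+ #         timeout=30.0,
+ #     )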
+ return response + + def create_instance( + self, + request: Optional[Union[service.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Instance in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_create_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.CreateInstanceRequest, dict]): + The request object. Message for creating a Instance + parent (str): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.alloydb_v1beta.types.Instance): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateInstanceRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateInstanceRequest): + request = service.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_secondary_instance( + self, + request: Optional[Union[service.CreateSecondaryInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[resources.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new SECONDARY Instance in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_create_secondary_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.CreateSecondaryInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_secondary_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.CreateSecondaryInstanceRequest, dict]): + The request object. Message for creating a Secondary + Instance + parent (str): + Required. The name of the parent + resource. For the required format, see + the comment on the Instance.name field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.alloydb_v1beta.types.Instance): + Required. The resource being created + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. ID of the requesting + object. 
+
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to.
+ It's the main unit of computing resources in AlloyDB.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, instance, instance_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.CreateSecondaryInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.CreateSecondaryInstanceRequest):
+ request = service.CreateSecondaryInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if instance is not None:
+ request.instance = instance
+ if instance_id is not None:
+ request.instance_id = instance_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[
+ self._transport.create_secondary_instance
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ resources.Instance,
+ metadata_type=service.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def batch_create_instances(
+ self,
+ request: Optional[Union[service.BatchCreateInstancesRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates new instances under the given project,
+ location and cluster. There can be only one primary
+ instance in a cluster. If the primary instance exists in
+ the cluster as well as in this request, the API will
+ throw an error.
+ The primary instance should exist before any read pool
+ instance is created. If the primary instance is a part
+ of the request payload, then the API will take care of
+ creating instances in the correct order. This method is
+ here to support Google-internal use cases, and is not
+ meant for external customers to consume. Please do not
+ start relying on it; its behavior is subject to change
+ without notice.
+
+ ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_batch_create_instances(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + requests = alloydb_v1beta.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.BatchCreateInstancesRequest, dict]): + The request object. Message for creating a batch of + instances under the specified cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1beta.types.BatchCreateInstancesResponse` + Message for creating batches of instances in a cluster. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.BatchCreateInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.BatchCreateInstancesRequest): + request = service.BatchCreateInstancesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + service.BatchCreateInstancesResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance( + self, + request: Optional[Union[service.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[resources.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_update_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.UpdateInstanceRequest, dict]): + The request object. Message for updating a Instance + instance (google.cloud.alloydb_v1beta.types.Instance): + Required. The resource being updated + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Instance resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateInstanceRequest): + request = service.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
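+ # Editorial sketch (illustrative field names): a partial update names
+ # the fields to overwrite via the mask; omitting the mask overwrites
+ # every field, per the docstring above:
+ #
+ #     from google.protobuf import field_mask_pb2
+ #     mask = field_mask_pb2.FieldMask(paths=["labels", "display_name"])
+ #     client.update_instance(instance=instance, update_mask=mask)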
+        rpc = self._transport._wrapped_methods[self._transport.update_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("instance.name", request.instance.name),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            resources.Instance,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_instance(
+        self,
+        request: Optional[Union[service.DeleteInstanceRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Deletes a single Instance.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            def sample_delete_instance():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1beta.DeleteInstanceRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.delete_instance(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.alloydb_v1beta.types.DeleteInstanceRequest, dict]):
+                The request object. Message for deleting an Instance
+            name (str):
+                Required. The name of the resource.
+                For the required format, see the comment
+                on the Instance.name field.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                   empty messages in your APIs. A typical example is to
+                   use it as the request or the response type of an API
+                   method. For instance:
+
+                   service Foo {
+                       rpc Bar(google.protobuf.Empty) returns
+                       (google.protobuf.Empty);
+
+                   }
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a service.DeleteInstanceRequest.
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteInstanceRequest): + request = service.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def failover_instance( + self, + request: Optional[Union[service.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_failover_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.FailoverInstanceRequest, dict]): + The request object. Message for triggering failover on an + Instance + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. 
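+        # Either a typed request object or a plain dict is accepted; a dict
+        # is coerced into a FailoverInstanceRequest below (illustrative only):
+        #
+        #     client.failover_instance(request={"name": "name_value"})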
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.FailoverInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.FailoverInstanceRequest): + request = service.FailoverInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.failover_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def restart_instance( + self, + request: Optional[Union[service.RestartInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restart an Instance in a cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_restart_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.RestartInstanceRequest, dict]): + The request object. + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Instance.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Instance` An Instance is a computing unit that an end customer can connect to. + It's the main unit of computing resources in AlloyDB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.RestartInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.RestartInstanceRequest): + request = service.RestartInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restart_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backups( + self, + request: Optional[Union[service.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_list_backups(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.ListBackupsRequest, dict]): + The request object. Message for requesting list of + Backups + parent (str): + Required. Parent value for + ListBackupsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListBackupsPager: + Message for response to listing + Backups + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListBackupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListBackupsRequest): + request = service.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup( + self, + request: Optional[Union[service.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Gets details of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_get_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.GetBackupRequest, dict]): + The request object. Message for getting a Backup + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.Backup: + Message describing Backup object + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetBackupRequest): + request = service.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_backup( + self, + request: Optional[Union[service.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[resources.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Backup in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_create_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1beta.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1beta.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.CreateBackupRequest, dict]): + The request object. Message for creating a Backup + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (google.cloud.alloydb_v1beta.types.Backup): + Required. 
The resource being created + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1beta.types.Backup` + Message describing Backup object + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateBackupRequest): + request = service.CreateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_backup( + self, + request: Optional[Union[service.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[resources.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_update_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1beta.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1beta.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.UpdateBackupRequest, dict]): + The request object. Message for updating a Backup + backup (google.cloud.alloydb_v1beta.types.Backup): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1beta.types.Backup` + Message describing Backup object + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateBackupRequest): + request = service.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Send the request. 
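+        # Per the update_mask semantics documented above, only fields named
+        # in ``update_mask.paths`` are overwritten on the server; a
+        # caller-side sketch (illustrative only, field path assumed):
+        #
+        #     from google.protobuf import field_mask_pb2
+        #     client.update_backup(
+        #         backup=backup,
+        #         update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+        #     )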
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Backup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup( + self, + request: Optional[Union[service.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_delete_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.DeleteBackupRequest, dict]): + The request object. Message for deleting a Backup + name (str): + Required. Name of the resource. For + the required format, see the comment on + the Backup.name field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteBackupRequest): + request = service.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_backup]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_supported_database_flags(
+        self,
+        request: Optional[
+            Union[service.ListSupportedDatabaseFlagsRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListSupportedDatabaseFlagsPager:
+        r"""Lists SupportedDatabaseFlags for a given project and
+        location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import alloydb_v1beta
+
+            def sample_list_supported_database_flags():
+                # Create a client
+                client = alloydb_v1beta.AlloyDBAdminClient()
+
+                # Initialize request argument(s)
+                request = alloydb_v1beta.ListSupportedDatabaseFlagsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_supported_database_flags(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsRequest, dict]):
+                The request object. Message for listing the information
+                about the supported Database flags.
+            parent (str):
+                Required. The name of the parent resource. The required
+                format is:
+
+                -  projects/{project}/locations/{location}
+
+                Regardless of the parent specified here, as long as it
+                contains a valid project and location, the service will
+                return a static list of supported flags resources. Note
+                that we do not yet support region-specific flags.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsPager:
+                Message for response to listing
+                SupportedDatabaseFlags.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
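+        # The two calling conventions are mutually exclusive; for example
+        # (illustrative only):
+        #
+        #     client.list_supported_database_flags(parent=parent)
+        #     client.list_supported_database_flags(request={"parent": parent})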
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListSupportedDatabaseFlagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListSupportedDatabaseFlagsRequest): + request = service.ListSupportedDatabaseFlagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_supported_database_flags + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSupportedDatabaseFlagsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def generate_client_certificate( + self, + request: Optional[Union[service.GenerateClientCertificateRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GenerateClientCertificateResponse: + r"""Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = client.generate_client_certificate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.GenerateClientCertificateRequest, dict]): + The request object. Message for requests to generate a + client certificate signed by the Cluster + CA. + parent (str): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location}/clusters/{cluster} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.GenerateClientCertificateResponse: + Message returned by a + GenerateClientCertificate operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GenerateClientCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GenerateClientCertificateRequest): + request = service.GenerateClientCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.generate_client_certificate + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_connection_info( + self, + request: Optional[Union[service.GetConnectionInfoRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConnectionInfo: + r"""Get instance metadata used for a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_get_connection_info(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_connection_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.GetConnectionInfoRequest, dict]): + The request object. Request message for + GetConnectionInfo. + parent (str): + Required. The name of the parent + resource. The required format is: + projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.ConnectionInfo: + ConnectionInfo singleton resource. + https://google.aip.dev/156 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetConnectionInfoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetConnectionInfoRequest): + request = service.GetConnectionInfoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AlloyDBAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
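+        # Unlike the AlloyDB RPCs above, this mixin method is wrapped ad hoc
+        # rather than pre-wrapped on the transport, so no default retry policy
+        # is attached; ``retry=DEFAULT`` resolves to no retries here.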
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
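+        # The routing header is sent as the ``x-goog-request-params``
+        # metadata key so the backend can route the call correctly.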
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AlloyDBAdminClient",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/pagers.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/pagers.py new file mode 100644 index 000000000000..1c442de8074c --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/pagers.py @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.alloydb_v1beta.types import resources, service + + +class ListClustersPager: + """A pager for iterating through ``list_clusters`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1beta.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListClustersResponse], + request: service.ListClustersRequest, + response: service.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListClustersRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Cluster]: + for page in self.pages: + yield from page.clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListClustersAsyncPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListClustersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1beta.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListClustersResponse]], + request: service.ListClustersRequest, + response: service.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListClustersRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
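+
+        Note: this pager is constructed and returned by the async client's
+        ``list_clusters`` method; user code does not normally instantiate
+        it directly.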
+ """ + self._method = method + self._request = service.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Cluster]: + async def async_generator(): + async for page in self.pages: + for response in page.clusters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1beta.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListInstancesResponse], + request: service.ListInstancesRequest, + response: service.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListInstancesRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.alloydb_v1beta.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListInstancesResponse]], + request: service.ListInstancesRequest, + response: service.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListInstancesRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1beta.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListBackupsResponse], + request: service.ListBackupsRequest, + response: service.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListBackupsRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
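+
+        Example (illustrative sketch only; in normal use this pager is
+        returned by ``AlloyDBAdminClient.list_backups``, and ``client``
+        and ``request`` below are assumed to already exist)::
+
+            pager = client.list_backups(request=request)
+            for page in pager.pages:  # one ListBackupsResponse per page
+                for backup in page.backups:
+                    print(backup.name)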
+ """ + self._method = method + self._request = service.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1beta.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListBackupsResponse]], + request: service.ListBackupsRequest, + response: service.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListBackupsRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSupportedDatabaseFlagsPager: + """A pager for iterating through ``list_supported_database_flags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``supported_database_flags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSupportedDatabaseFlags`` requests and continue to iterate + through the ``supported_database_flags`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListSupportedDatabaseFlagsResponse], + request: service.ListSupportedDatabaseFlagsRequest, + response: service.ListSupportedDatabaseFlagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListSupportedDatabaseFlagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSupportedDatabaseFlagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.SupportedDatabaseFlag]: + for page in self.pages: + yield from page.supported_database_flags + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSupportedDatabaseFlagsAsyncPager: + """A pager for iterating through ``list_supported_database_flags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``supported_database_flags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSupportedDatabaseFlags`` requests and continue to iterate + through the ``supported_database_flags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListSupportedDatabaseFlagsResponse]], + request: service.ListSupportedDatabaseFlagsRequest, + response: service.ListSupportedDatabaseFlagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsRequest): + The initial request object. + response (google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
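+
+        Note that on the async pager ``pages`` is a property that returns
+        an asynchronous generator, so it must be consumed with ``async for``
+        (for example, ``async for page in pager.pages``); individual items
+        can also be iterated directly with ``async for flag in pager``.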
+ """ + self._method = method + self._request = service.ListSupportedDatabaseFlagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListSupportedDatabaseFlagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.SupportedDatabaseFlag]: + async def async_generator(): + async for page in self.pages: + for response in page.supported_database_flags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/__init__.py new file mode 100644 index 000000000000..1e347e0f52b8 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AlloyDBAdminTransport +from .grpc import AlloyDBAdminGrpcTransport +from .grpc_asyncio import AlloyDBAdminGrpcAsyncIOTransport +from .rest import AlloyDBAdminRestInterceptor, AlloyDBAdminRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AlloyDBAdminTransport]] +_transport_registry["grpc"] = AlloyDBAdminGrpcTransport +_transport_registry["grpc_asyncio"] = AlloyDBAdminGrpcAsyncIOTransport +_transport_registry["rest"] = AlloyDBAdminRestTransport + +__all__ = ( + "AlloyDBAdminTransport", + "AlloyDBAdminGrpcTransport", + "AlloyDBAdminGrpcAsyncIOTransport", + "AlloyDBAdminRestTransport", + "AlloyDBAdminRestInterceptor", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py new file mode 100644 index 000000000000..daa116fa73b2 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py @@ -0,0 +1,640 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.alloydb_v1beta import gapic_version as package_version +from google.cloud.alloydb_v1beta.types import resources, service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AlloyDBAdminTransport(abc.ABC): + """Abstract transport class for AlloyDBAdmin.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "alloydb.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
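+        # Resolution order below: an explicit ``credentials_file`` wins,
+        # then explicitly passed ``credentials``, and finally application
+        # default credentials discovered via ``google.auth.default()``.
+        # Passing both explicit forms at once is rejected outright.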
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_clusters: gapic_v1.method.wrap_method( + self.list_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method.wrap_method( + self.get_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method.wrap_method( + self.create_cluster, + default_timeout=None, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method.wrap_method( + self.update_cluster, + default_timeout=None, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method.wrap_method( + self.delete_cluster, + default_timeout=None, + client_info=client_info, + ), + self.promote_cluster: gapic_v1.method.wrap_method( + self.promote_cluster, + default_timeout=None, + client_info=client_info, + ), + self.restore_cluster: gapic_v1.method.wrap_method( + self.restore_cluster, + default_timeout=None, + client_info=client_info, + ), + self.create_secondary_cluster: gapic_v1.method.wrap_method( + self.create_secondary_cluster, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=None, + client_info=client_info, + ), + self.create_secondary_instance: gapic_v1.method.wrap_method( + 
self.create_secondary_instance, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_instances: gapic_v1.method.wrap_method( + self.batch_create_instances, + default_timeout=None, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_timeout=None, + client_info=client_info, + ), + self.failover_instance: gapic_v1.method.wrap_method( + self.failover_instance, + default_timeout=None, + client_info=client_info, + ), + self.restart_instance: gapic_v1.method.wrap_method( + self.restart_instance, + default_timeout=None, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method.wrap_method( + self.create_backup, + default_timeout=None, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.list_supported_database_flags: gapic_v1.method.wrap_method( + self.list_supported_database_flags, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.generate_client_certificate: gapic_v1.method.wrap_method( + self.generate_client_certificate, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection_info: gapic_v1.method.wrap_method( + self.get_connection_info, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
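+
+        In typical usage the transport is closed indirectly, by calling
+        ``close()`` on the client that owns it or by using that client as
+        a context manager, rather than by calling this method directly.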
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_clusters( + self, + ) -> Callable[ + [service.ListClustersRequest], + Union[service.ListClustersResponse, Awaitable[service.ListClustersResponse]], + ]: + raise NotImplementedError() + + @property + def get_cluster( + self, + ) -> Callable[ + [service.GetClusterRequest], + Union[resources.Cluster, Awaitable[resources.Cluster]], + ]: + raise NotImplementedError() + + @property + def create_cluster( + self, + ) -> Callable[ + [service.CreateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_cluster( + self, + ) -> Callable[ + [service.UpdateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cluster( + self, + ) -> Callable[ + [service.DeleteClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def promote_cluster( + self, + ) -> Callable[ + [service.PromoteClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_cluster( + self, + ) -> Callable[ + [service.RestoreClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_secondary_cluster( + self, + ) -> Callable[ + [service.CreateSecondaryClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], + Union[service.ListInstancesResponse, Awaitable[service.ListInstancesResponse]], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> Callable[ + [service.GetInstanceRequest], + Union[resources.Instance, Awaitable[resources.Instance]], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> Callable[ + [service.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_secondary_instance( + self, + ) -> Callable[ + [service.CreateSecondaryInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def batch_create_instances( + self, + ) -> Callable[ + [service.BatchCreateInstancesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_instance( + self, + ) -> Callable[ + [service.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> Callable[ + [service.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def failover_instance( + self, + ) -> Callable[ + [service.FailoverInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restart_instance( + self, + ) -> Callable[ + 
[service.RestartInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [service.ListBackupsRequest], + Union[service.ListBackupsResponse, Awaitable[service.ListBackupsResponse]], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [service.GetBackupRequest], Union[resources.Backup, Awaitable[resources.Backup]] + ]: + raise NotImplementedError() + + @property + def create_backup( + self, + ) -> Callable[ + [service.CreateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [service.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [service.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + Union[ + service.ListSupportedDatabaseFlagsResponse, + Awaitable[service.ListSupportedDatabaseFlagsResponse], + ], + ]: + raise NotImplementedError() + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + Union[ + service.GenerateClientCertificateResponse, + Awaitable[service.GenerateClientCertificateResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_connection_info( + self, + ) -> Callable[ + [service.GetConnectionInfoRequest], + Union[resources.ConnectionInfo, Awaitable[resources.ConnectionInfo]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AlloyDBAdminTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py new file mode 100644 index 000000000000..71d4e2c698ba --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py 
@@ -0,0 +1,1049 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.alloydb_v1beta.types import resources, service
+
+from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
+
+
+class AlloyDBAdminGrpcTransport(AlloyDBAdminTransport):
+    """gRPC backend transport for AlloyDBAdmin.
+
+    Service describing handlers for resources
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "alloydb.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def list_clusters(
+        self,
+    ) -> Callable[[service.ListClustersRequest], service.ListClustersResponse]:
+        r"""Return a callable for the list clusters method over gRPC.
+
+        Lists Clusters in a given project and location.
+
+        Returns:
+            Callable[[~.ListClustersRequest],
+                    ~.ListClustersResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
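+        # The callable is cached in self._stubs keyed by method name, so the
+        # underlying channel method is only created once per transport.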
+ if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListClusters", + request_serializer=service.ListClustersRequest.serialize, + response_deserializer=service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster(self) -> Callable[[service.GetClusterRequest], resources.Cluster]: + r"""Return a callable for the get cluster method over gRPC. + + Gets details of a single Cluster. + + Returns: + Callable[[~.GetClusterRequest], + ~.Cluster]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetCluster", + request_serializer=service.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new Cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateCluster", + request_serializer=service.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the parameters of a single Cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpdateCluster", + request_serializer=service.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single Cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/DeleteCluster", + request_serializer=service.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def promote_cluster( + self, + ) -> Callable[[service.PromoteClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the promote cluster method over gRPC. + + Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + Returns: + Callable[[~.PromoteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "promote_cluster" not in self._stubs: + self._stubs["promote_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/PromoteCluster", + request_serializer=service.PromoteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["promote_cluster"] + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the restore cluster method over gRPC. + + Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + Returns: + Callable[[~.RestoreClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_cluster" not in self._stubs: + self._stubs["restore_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/RestoreCluster", + request_serializer=service.RestoreClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_cluster"] + + @property + def create_secondary_cluster( + self, + ) -> Callable[[service.CreateSecondaryClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the create secondary cluster method over gRPC. + + Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + Returns: + Callable[[~.CreateSecondaryClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
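+        # Long-running methods return raw ``operations_pb2.Operation``
+        # messages, so the plain protobuf ``FromString`` deserializer is
+        # used here instead of the proto-plus ``deserialize`` helper.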
+ if "create_secondary_cluster" not in self._stubs: + self._stubs["create_secondary_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateSecondaryCluster", + request_serializer=service.CreateSecondaryClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_secondary_cluster"] + + @property + def list_instances( + self, + ) -> Callable[[service.ListInstancesRequest], service.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], resources.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetInstance", + request_serializer=service.GetInstanceRequest.serialize, + response_deserializer=resources.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateInstance", + request_serializer=service.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def create_secondary_instance( + self, + ) -> Callable[[service.CreateSecondaryInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create secondary instance method over gRPC. + + Creates a new SECONDARY Instance in a given project + and location. + + Returns: + Callable[[~.CreateSecondaryInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_secondary_instance" not in self._stubs: + self._stubs["create_secondary_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateSecondaryInstance", + request_serializer=service.CreateSecondaryInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_secondary_instance"] + + @property + def batch_create_instances( + self, + ) -> Callable[[service.BatchCreateInstancesRequest], operations_pb2.Operation]: + r"""Return a callable for the batch create instances method over gRPC. + + Creates new instances under the given project, + location and cluster. There can be only one primary + instance in a cluster. If the primary instance exists in + the cluster as well as this request, then API will throw + an error. + The primary instance should exist before any read pool + instance is created. If the primary instance is a part + of the request payload, then the API will take care of + creating instances in the correct order. This method is + here to support Google-internal use cases, and is not + meant for external customers to consume. Please do not + start relying on it; its behavior is subject to change + without notice. + + Returns: + Callable[[~.BatchCreateInstancesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_instances" not in self._stubs: + self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/BatchCreateInstances", + request_serializer=service.BatchCreateInstancesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_instances"] + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single Instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpdateInstance", + request_serializer=service.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/DeleteInstance", + request_serializer=service.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def failover_instance( + self, + ) -> Callable[[service.FailoverInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the failover instance method over gRPC. + + Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + Returns: + Callable[[~.FailoverInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/FailoverInstance", + request_serializer=service.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["failover_instance"] + + @property + def restart_instance( + self, + ) -> Callable[[service.RestartInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the restart instance method over gRPC. + + Restart an Instance in a cluster. + Imperative only. + + Returns: + Callable[[~.RestartInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restart_instance" not in self._stubs: + self._stubs["restart_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/RestartInstance", + request_serializer=service.RestartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restart_instance"] + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], service.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListBackups", + request_serializer=service.ListBackupsRequest.serialize, + response_deserializer=service.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup(self) -> Callable[[service.GetBackupRequest], resources.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single Backup. 
+ + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetBackup", + request_serializer=service.GetBackupRequest.serialize, + response_deserializer=resources.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup method over gRPC. + + Creates a new Backup in a given project and location. + + Returns: + Callable[[~.CreateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateBackup", + request_serializer=service.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Updates the parameters of a single Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpdateBackup", + request_serializer=service.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
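+ # A note on the idiom used throughout this transport: each property
+ # builds its stub on first access and caches it in ``self._stubs``
+ # under the method name, so later accesses return the same callable.
+ # A minimal, hypothetical usage sketch (``creds`` and ``backup_name``
+ # are placeholders, not part of this file):
+ #
+ #   transport = AlloyDBAdminGrpcTransport(credentials=creds)
+ #   rpc = transport.delete_backup  # first access builds and caches the stub
+ #   operation = rpc(service.DeleteBackupRequest(name=backup_name))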
+ if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/DeleteBackup", + request_serializer=service.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + service.ListSupportedDatabaseFlagsResponse, + ]: + r"""Return a callable for the list supported database flags method over gRPC. + + Lists SupportedDatabaseFlags for a given project and + location. + + Returns: + Callable[[~.ListSupportedDatabaseFlagsRequest], + ~.ListSupportedDatabaseFlagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_supported_database_flags" not in self._stubs: + self._stubs[ + "list_supported_database_flags" + ] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListSupportedDatabaseFlags", + request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize, + response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize, + ) + return self._stubs["list_supported_database_flags"] + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + service.GenerateClientCertificateResponse, + ]: + r"""Return a callable for the generate client certificate method over gRPC. + + Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + Returns: + Callable[[~.GenerateClientCertificateRequest], + ~.GenerateClientCertificateResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_client_certificate" not in self._stubs: + self._stubs["generate_client_certificate"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GenerateClientCertificate", + request_serializer=service.GenerateClientCertificateRequest.serialize, + response_deserializer=service.GenerateClientCertificateResponse.deserialize, + ) + return self._stubs["generate_client_certificate"] + + @property + def get_connection_info( + self, + ) -> Callable[[service.GetConnectionInfoRequest], resources.ConnectionInfo]: + r"""Return a callable for the get connection info method over gRPC. + + Get instance metadata used for a connection. + + Returns: + Callable[[~.GetConnectionInfoRequest], + ~.ConnectionInfo]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_connection_info" not in self._stubs: + self._stubs["get_connection_info"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetConnectionInfo", + request_serializer=service.GetConnectionInfoRequest.serialize, + response_deserializer=resources.ConnectionInfo.deserialize, + ) + return self._stubs["get_connection_info"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_locations" not in self._stubs:
+ self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+ "/google.cloud.location.Locations/ListLocations",
+ request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+ response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+ )
+ return self._stubs["list_locations"]
+
+ @property
+ def get_location(
+ self,
+ ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+ r"""Return a callable for the get location method over gRPC."""
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_location" not in self._stubs:
+ self._stubs["get_location"] = self.grpc_channel.unary_unary(
+ "/google.cloud.location.Locations/GetLocation",
+ request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+ response_deserializer=locations_pb2.Location.FromString,
+ )
+ return self._stubs["get_location"]
+
+ @property
+ def kind(self) -> str:
+ return "grpc"
+
+
+__all__ = ("AlloyDBAdminGrpcTransport",)
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..51c96c6410f9
--- /dev/null
+++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py
@@ -0,0 +1,1070 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.cloud.location import locations_pb2 # type: ignore
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.alloydb_v1beta.types import resources, service
+
+from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
+from .grpc import AlloyDBAdminGrpcTransport
+
+
+class AlloyDBAdminGrpcAsyncIOTransport(AlloyDBAdminTransport):
+ """gRPC AsyncIO backend transport for AlloyDBAdmin.
+
+ Service describing handlers for resources
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "alloydb.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "alloydb.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[aio.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_clusters( + self, + ) -> Callable[ + [service.ListClustersRequest], Awaitable[service.ListClustersResponse] + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists Clusters in a given project and location. + + Returns: + Callable[[~.ListClustersRequest], + Awaitable[~.ListClustersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListClusters", + request_serializer=service.ListClustersRequest.serialize, + response_deserializer=service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[[service.GetClusterRequest], Awaitable[resources.Cluster]]: + r"""Return a callable for the get cluster method over gRPC. + + Gets details of a single Cluster. + + Returns: + Callable[[~.GetClusterRequest], + Awaitable[~.Cluster]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetCluster", + request_serializer=service.GetClusterRequest.serialize, + response_deserializer=resources.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a new Cluster in a given project and + location. + + Returns: + Callable[[~.CreateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateCluster", + request_serializer=service.CreateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the parameters of a single Cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpdateCluster", + request_serializer=service.UpdateClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_cluster"] + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes a single Cluster. + + Returns: + Callable[[~.DeleteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/DeleteCluster", + request_serializer=service.DeleteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_cluster"] + + @property + def promote_cluster( + self, + ) -> Callable[[service.PromoteClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the promote cluster method over gRPC. + + Promotes a SECONDARY cluster. This turns down + replication from the PRIMARY cluster and promotes a + secondary cluster into its own standalone cluster. + Imperative only. + + Returns: + Callable[[~.PromoteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "promote_cluster" not in self._stubs: + self._stubs["promote_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/PromoteCluster", + request_serializer=service.PromoteClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["promote_cluster"] + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restore cluster method over gRPC. 
+ + Creates a new Cluster in a given project and + location, with a volume restored from the provided + source, either a backup ID or a point-in-time and a + source cluster. + + Returns: + Callable[[~.RestoreClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_cluster" not in self._stubs: + self._stubs["restore_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/RestoreCluster", + request_serializer=service.RestoreClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_cluster"] + + @property + def create_secondary_cluster( + self, + ) -> Callable[ + [service.CreateSecondaryClusterRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create secondary cluster method over gRPC. + + Creates a cluster of type SECONDARY in the given + location using the primary cluster as the source. + + Returns: + Callable[[~.CreateSecondaryClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_secondary_cluster" not in self._stubs: + self._stubs["create_secondary_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateSecondaryCluster", + request_serializer=service.CreateSecondaryClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_secondary_cluster"] + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], Awaitable[service.ListInstancesResponse] + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], Awaitable[resources.Instance]]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
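+ # Stubs built on this asyncio channel return awaitables rather than
+ # plain responses; a hedged usage sketch (``request`` is a placeholder
+ # GetInstanceRequest supplied by the caller):
+ #
+ #   instance = await transport.get_instance(request)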
+ if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetInstance", + request_serializer=service.GetInstanceRequest.serialize, + response_deserializer=resources.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateInstance", + request_serializer=service.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def create_secondary_instance( + self, + ) -> Callable[ + [service.CreateSecondaryInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create secondary instance method over gRPC. + + Creates a new SECONDARY Instance in a given project + and location. + + Returns: + Callable[[~.CreateSecondaryInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_secondary_instance" not in self._stubs: + self._stubs["create_secondary_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateSecondaryInstance", + request_serializer=service.CreateSecondaryInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_secondary_instance"] + + @property + def batch_create_instances( + self, + ) -> Callable[ + [service.BatchCreateInstancesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the batch create instances method over gRPC. + + Creates new instances under the given project, + location and cluster. There can be only one primary + instance in a cluster. If the primary instance exists in + the cluster as well as this request, then API will throw + an error. + The primary instance should exist before any read pool + instance is created. If the primary instance is a part + of the request payload, then the API will take care of + creating instances in the correct order. This method is + here to support Google-internal use cases, and is not + meant for external customers to consume. Please do not + start relying on it; its behavior is subject to change + without notice. + + Returns: + Callable[[~.BatchCreateInstancesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_instances" not in self._stubs: + self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/BatchCreateInstances", + request_serializer=service.BatchCreateInstancesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_instances"] + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance method over gRPC. + + Updates the parameters of a single Instance. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpdateInstance", + request_serializer=service.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/DeleteInstance", + request_serializer=service.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def failover_instance( + self, + ) -> Callable[ + [service.FailoverInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the failover instance method over gRPC. + + Forces a Failover for a highly available instance. + Failover promotes the HA standby instance as the new + primary. Imperative only. + + Returns: + Callable[[~.FailoverInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
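+ # Note that this is a long-running RPC: the stub yields a raw
+ # ``operations_pb2.Operation``, and polling it to completion is left to
+ # the caller (typically via the ``operations_client`` property defined
+ # earlier in this class); the transport does not wait for the failover.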
+ if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/FailoverInstance", + request_serializer=service.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["failover_instance"] + + @property + def restart_instance( + self, + ) -> Callable[ + [service.RestartInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restart instance method over gRPC. + + Restart an Instance in a cluster. + Imperative only. + + Returns: + Callable[[~.RestartInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restart_instance" not in self._stubs: + self._stubs["restart_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/RestartInstance", + request_serializer=service.RestartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restart_instance"] + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], Awaitable[service.ListBackupsResponse]]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListBackups", + request_serializer=service.ListBackupsRequest.serialize, + response_deserializer=service.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[service.GetBackupRequest], Awaitable[resources.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single Backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetBackup", + request_serializer=service.GetBackupRequest.serialize, + response_deserializer=resources.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create backup method over gRPC. + + Creates a new Backup in a given project and location. + + Returns: + Callable[[~.CreateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateBackup", + request_serializer=service.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update backup method over gRPC. + + Updates the parameters of a single Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpdateBackup", + request_serializer=service.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/DeleteBackup", + request_serializer=service.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + Awaitable[service.ListSupportedDatabaseFlagsResponse], + ]: + r"""Return a callable for the list supported database flags method over gRPC. + + Lists SupportedDatabaseFlags for a given project and + location. + + Returns: + Callable[[~.ListSupportedDatabaseFlagsRequest], + Awaitable[~.ListSupportedDatabaseFlagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_supported_database_flags" not in self._stubs: + self._stubs[ + "list_supported_database_flags" + ] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ListSupportedDatabaseFlags", + request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize, + response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize, + ) + return self._stubs["list_supported_database_flags"] + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + Awaitable[service.GenerateClientCertificateResponse], + ]: + r"""Return a callable for the generate client certificate method over gRPC. + + Generate a client certificate signed by a Cluster CA. + The sole purpose of this endpoint is to support the Auth + Proxy client and the endpoint's behavior is subject to + change without notice, so do not rely on its behavior + remaining constant. Future changes will not break the + Auth Proxy client. + + Returns: + Callable[[~.GenerateClientCertificateRequest], + Awaitable[~.GenerateClientCertificateResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_client_certificate" not in self._stubs: + self._stubs["generate_client_certificate"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GenerateClientCertificate", + request_serializer=service.GenerateClientCertificateRequest.serialize, + response_deserializer=service.GenerateClientCertificateResponse.deserialize, + ) + return self._stubs["generate_client_certificate"] + + @property + def get_connection_info( + self, + ) -> Callable[ + [service.GetConnectionInfoRequest], Awaitable[resources.ConnectionInfo] + ]: + r"""Return a callable for the get connection info method over gRPC. + + Get instance metadata used for a connection. + + Returns: + Callable[[~.GetConnectionInfoRequest], + Awaitable[~.ConnectionInfo]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_connection_info" not in self._stubs: + self._stubs["get_connection_info"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/GetConnectionInfo", + request_serializer=service.GetConnectionInfoRequest.serialize, + response_deserializer=resources.ConnectionInfo.deserialize, + ) + return self._stubs["get_connection_info"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("AlloyDBAdminGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py new file mode 100644 index 000000000000..39187f892184 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py @@ -0,0 +1,4071 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.alloydb_v1beta.types import resources, service + +from .base import AlloyDBAdminTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AlloyDBAdminRestInterceptor: + """Interceptor for AlloyDBAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AlloyDBAdminRestTransport. + + .. 
code-block:: python + class MyCustomAlloyDBAdminInterceptor(AlloyDBAdminRestInterceptor): + def pre_batch_create_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_secondary_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_secondary_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_secondary_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_secondary_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_failover_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_failover_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_client_certificate(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_client_certificate(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection_info(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_supported_database_flags(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_supported_database_flags(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_promote_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_promote_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restart_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restart_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AlloyDBAdminRestTransport(interceptor=MyCustomAlloyDBAdminInterceptor()) + client = AlloyDBAdminClient(transport=transport) + + + """ + + def pre_batch_create_instances( + self, + request: service.BatchCreateInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.BatchCreateInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_create_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_batch_create_instances( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_create_instances + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_create_backup( + self, request: service.CreateBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_cluster( + self, request: service.CreateClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_instance( + self, + request: service.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_secondary_cluster( + self, + request: service.CreateSecondaryClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateSecondaryClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_secondary_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_secondary_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_secondary_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_create_secondary_instance( + self, + request: service.CreateSecondaryInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.CreateSecondaryInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_secondary_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_create_secondary_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_secondary_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_backup( + self, request: service.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_cluster( + self, request: service.DeleteClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.DeleteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_instance( + self, + request: service.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_failover_instance( + self, + request: service.FailoverInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for failover_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_failover_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for failover_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_generate_client_certificate( + self, + request: service.GenerateClientCertificateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GenerateClientCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_client_certificate + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_generate_client_certificate( + self, response: service.GenerateClientCertificateResponse + ) -> service.GenerateClientCertificateResponse: + """Post-rpc interceptor for generate_client_certificate + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_backup( + self, request: service.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_backup(self, response: resources.Backup) -> resources.Backup: + """Post-rpc interceptor for get_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_cluster( + self, request: service.GetClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: + """Post-rpc interceptor for get_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_connection_info( + self, + request: service.GetConnectionInfoRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetConnectionInfoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_connection_info( + self, response: resources.ConnectionInfo + ) -> resources.ConnectionInfo: + """Post-rpc interceptor for get_connection_info + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_instance( + self, request: service.GetInstanceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_instance(self, response: resources.Instance) -> resources.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_list_backups( + self, request: service.ListBackupsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_backups( + self, response: service.ListBackupsResponse + ) -> service.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_clusters( + self, request: service.ListClustersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_clusters( + self, response: service.ListClustersResponse + ) -> service.ListClustersResponse: + """Post-rpc interceptor for list_clusters + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_instances( + self, request: service.ListInstancesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_instances( + self, response: service.ListInstancesResponse + ) -> service.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_supported_database_flags( + self, + request: service.ListSupportedDatabaseFlagsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListSupportedDatabaseFlagsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_supported_database_flags + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_supported_database_flags( + self, response: service.ListSupportedDatabaseFlagsResponse + ) -> service.ListSupportedDatabaseFlagsResponse: + """Post-rpc interceptor for list_supported_database_flags + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_promote_cluster( + self, + request: service.PromoteClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.PromoteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for promote_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_promote_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for promote_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_restart_instance( + self, + request: service.RestartInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RestartInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restart_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_restart_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restart_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_restore_cluster( + self, + request: service.RestoreClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.RestoreClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_restore_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_backup( + self, request: service.UpdateBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_cluster( + self, request: service.UpdateClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_update_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_update_instance( + self, + request: service.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AlloyDBAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AlloyDBAdminRestInterceptor + + +class AlloyDBAdminRestTransport(AlloyDBAdminTransport): + """REST backend transport for AlloyDBAdmin. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "alloydb.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AlloyDBAdminRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AlloyDBAdminRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1beta/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1beta/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1beta/{name=projects/*/locations/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1beta",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _BatchCreateInstances(AlloyDBAdminRestStub):
+        def __hash__(self):
+            return hash("BatchCreateInstances")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.BatchCreateInstancesRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the batch create instances method over HTTP.
+
+            Args:
+                request (~.service.BatchCreateInstancesRequest):
+                    The request object. Message for creating a batch of
+                    instances under the specified cluster.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/clusters/*}/instances:batchCreate", + "body": "requests", + }, + ] + request, metadata = self._interceptor.pre_batch_create_instances( + request, metadata + ) + pb_request = service.BatchCreateInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_instances(resp) + return resp + + class _CreateBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup method over HTTP. + + Args: + request (~.service.CreateBackupRequest): + The request object. Message for creating a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/backups", + "body": "backup", + }, + ] + request, metadata = self._interceptor.pre_create_backup(request, metadata) + pb_request = service.CreateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup(resp) + return resp + + class _CreateCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "clusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cluster method over HTTP. + + Args: + request (~.service.CreateClusterRequest): + The request object. Message for creating a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/clusters", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_create_cluster(request, metadata) + pb_request = service.CreateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cluster(resp) + return resp + + class _CreateInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.service.CreateInstanceRequest): + The request object. Message for creating a Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/clusters/*}/instances", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = service.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _CreateSecondaryCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateSecondaryCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "clusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateSecondaryClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create secondary cluster method over HTTP. + + Args: + request (~.service.CreateSecondaryClusterRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/clusters:createsecondary", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_create_secondary_cluster( + request, metadata + ) + pb_request = service.CreateSecondaryClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_secondary_cluster(resp) + return resp + + class _CreateSecondaryInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("CreateSecondaryInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateSecondaryInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create secondary instance method over HTTP. + + Args: + request (~.service.CreateSecondaryInstanceRequest): + The request object. Message for creating a Secondary + Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/clusters/*}/instances:createsecondary", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_secondary_instance( + request, metadata + ) + pb_request = service.CreateSecondaryInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_secondary_instance(resp) + return resp + + class _DeleteBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("DeleteBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.service.DeleteBackupRequest): + The request object. Message for deleting a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = service.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup(resp) + return resp + + class _DeleteCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("DeleteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cluster method over HTTP. + + Args: + request (~.service.DeleteClusterRequest): + The request object. Message for deleting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_cluster(request, metadata) + pb_request = service.DeleteClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cluster(resp) + return resp + + class _DeleteInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("DeleteInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.service.DeleteInstanceRequest): + The request object. Message for deleting a Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = service.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _FailoverInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("FailoverInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.FailoverInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the failover instance method over HTTP. + + Args: + request (~.service.FailoverInstanceRequest): + The request object. Message for triggering failover on an + Instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}:failover", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_failover_instance( + request, metadata + ) + pb_request = service.FailoverInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_failover_instance(resp) + return resp + + class _GenerateClientCertificate(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GenerateClientCertificate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GenerateClientCertificateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GenerateClientCertificateResponse: + r"""Call the generate client + certificate method over HTTP. + + Args: + request (~.service.GenerateClientCertificateRequest): + The request object. Message for requests to generate a + client certificate signed by the Cluster + CA. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.GenerateClientCertificateResponse: + Message returned by a + GenerateClientCertificate operation. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/clusters/*}:generateClientCertificate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_client_certificate( + request, metadata + ) + pb_request = service.GenerateClientCertificateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.GenerateClientCertificateResponse() + pb_resp = service.GenerateClientCertificateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_client_certificate(resp) + return resp + + class _GetBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.service.GetBackupRequest): + The request object. Message for getting a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.resources.Backup: + Message describing Backup object + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = service.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Backup() + pb_resp = resources.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Cluster: + r"""Call the get cluster method over HTTP. + + Args: + request (~.service.GetClusterRequest): + The request object. Message for getting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Cluster: + A cluster is a collection of regional + AlloyDB resources. It can include a + primary instance and one or more read + pool instances. All cluster resources + share a storage layer, which scales as + needed. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster(request, metadata) + pb_request = service.GetClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Cluster() + pb_resp = resources.Cluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cluster(resp) + return resp + + class _GetConnectionInfo(AlloyDBAdminRestStub): + def __hash__(self): + return hash("GetConnectionInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetConnectionInfoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.ConnectionInfo: + r"""Call the get connection info method over HTTP. + + Args: + request (~.service.GetConnectionInfoRequest): + The request object. Request message for + GetConnectionInfo. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.ConnectionInfo: + ConnectionInfo singleton resource. 
+                    https://google.aip.dev/156
+
+        """
+
+        http_options: List[Dict[str, str]] = [
+            {
+                "method": "get",
+                "uri": "/v1beta/{parent=projects/*/locations/*/clusters/*/instances/*}/connectionInfo",
+            },
+        ]
+        request, metadata = self._interceptor.pre_get_connection_info(
+            request, metadata
+        )
+        pb_request = service.GetConnectionInfoRequest.pb(request)
+        transcoded_request = path_template.transcode(http_options, pb_request)
+
+        uri = transcoded_request["uri"]
+        method = transcoded_request["method"]
+
+        # Jsonify the query params
+        query_params = json.loads(
+            json_format.MessageToJson(
+                transcoded_request["query_params"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+        )
+        query_params.update(self._get_unset_required_fields(query_params))
+
+        query_params["$alt"] = "json;enum-encoding=int"
+
+        # Send the request
+        headers = dict(metadata)
+        headers["Content-Type"] = "application/json"
+        response = getattr(self._session, method)(
+            "{host}{uri}".format(host=self._host, uri=uri),
+            timeout=timeout,
+            headers=headers,
+            params=rest_helpers.flatten_query_params(query_params, strict=True),
+        )
+
+        # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+        # subclass.
+        if response.status_code >= 400:
+            raise core_exceptions.from_http_response(response)
+
+        # Return the response
+        resp = resources.ConnectionInfo()
+        pb_resp = resources.ConnectionInfo.pb(resp)
+
+        json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+        resp = self._interceptor.post_get_connection_info(resp)
+        return resp
+
+    class _GetInstance(AlloyDBAdminRestStub):
+        def __hash__(self):
+            return hash("GetInstance")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.GetInstanceRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> resources.Instance:
+            r"""Call the get instance method over HTTP.
+
+            Args:
+                request (~.service.GetInstanceRequest):
+                    The request object. Message for getting an Instance
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.resources.Instance:
+                    An Instance is a computing unit that
+                    an end customer can connect to. It's the
+                    main unit of computing resources in
+                    AlloyDB.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = service.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Instance() + pb_resp = resources.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ListBackups(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.service.ListBackupsRequest): + The request object. Message for requesting list of + Backups + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListBackupsResponse: + Message for response to listing + Backups + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = service.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListBackupsResponse() + pb_resp = service.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListClusters(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListClustersResponse: + r"""Call the list clusters method over HTTP. + + Args: + request (~.service.ListClustersRequest): + The request object. Message for requesting list of + Clusters + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListClustersResponse: + Message for response to listing + Clusters + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*}/clusters", + }, + ] + request, metadata = self._interceptor.pre_list_clusters(request, metadata) + pb_request = service.ListClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListClustersResponse() + pb_resp = service.ListClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_clusters(resp) + return resp + + class _ListInstances(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.service.ListInstancesRequest): + The request object. Message for requesting list of + Instances + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListInstancesResponse: + Message for response to listing + Instances + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/clusters/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = service.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListInstancesResponse() + pb_resp = service.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _ListSupportedDatabaseFlags(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListSupportedDatabaseFlags") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListSupportedDatabaseFlagsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListSupportedDatabaseFlagsResponse: + r"""Call the list supported database + flags method over HTTP. + + Args: + request (~.service.ListSupportedDatabaseFlagsRequest): + The request object. Message for listing the information + about the supported Database flags. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListSupportedDatabaseFlagsResponse: + Message for response to listing + SupportedDatabaseFlags. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*}/supportedDatabaseFlags", + }, + ] + request, metadata = self._interceptor.pre_list_supported_database_flags( + request, metadata + ) + pb_request = service.ListSupportedDatabaseFlagsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListSupportedDatabaseFlagsResponse() + pb_resp = service.ListSupportedDatabaseFlagsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_supported_database_flags(resp) + return resp + + class _PromoteCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("PromoteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.PromoteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the promote cluster method over HTTP. + + Args: + request (~.service.PromoteClusterRequest): + The request object. Message for promoting a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*}:promote", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_promote_cluster(request, metadata) + pb_request = service.PromoteClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_promote_cluster(resp) + return resp + + class _RestartInstance(AlloyDBAdminRestStub): + def __hash__(self): + return hash("RestartInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RestartInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restart instance method over HTTP. + + Args: + request (~.service.RestartInstanceRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}:restart", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restart_instance( + request, metadata + ) + pb_request = service.RestartInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restart_instance(resp) + return resp + + class _RestoreCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("RestoreCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.RestoreClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore cluster method over HTTP. + + Args: + request (~.service.RestoreClusterRequest): + The request object. Message for restoring a Cluster from + a backup or another cluster at a given + point in time. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/clusters:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_cluster(request, metadata) + pb_request = service.RestoreClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_cluster(resp) + return resp + + class _UpdateBackup(AlloyDBAdminRestStub): + def __hash__(self): + return hash("UpdateBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update backup method over HTTP. + + Args: + request (~.service.UpdateBackupRequest): + The request object. Message for updating a Backup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{backup.name=projects/*/locations/*/backups/*}", + "body": "backup", + }, + ] + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = service.UpdateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup(resp) + return resp + + class _UpdateCluster(AlloyDBAdminRestStub): + def __hash__(self): + return hash("UpdateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update cluster method over HTTP. + + Args: + request (~.service.UpdateClusterRequest): + The request object. Message for updating a Cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+
+        """
+
+        http_options: List[Dict[str, str]] = [
+            {
+                "method": "patch",
+                "uri": "/v1beta/{cluster.name=projects/*/locations/*/clusters/*}",
+                "body": "cluster",
+            },
+        ]
+        request, metadata = self._interceptor.pre_update_cluster(request, metadata)
+        pb_request = service.UpdateClusterRequest.pb(request)
+        transcoded_request = path_template.transcode(http_options, pb_request)
+
+        # Jsonify the request body
+
+        body = json_format.MessageToJson(
+            transcoded_request["body"],
+            including_default_value_fields=False,
+            use_integers_for_enums=True,
+        )
+        uri = transcoded_request["uri"]
+        method = transcoded_request["method"]
+
+        # Jsonify the query params
+        query_params = json.loads(
+            json_format.MessageToJson(
+                transcoded_request["query_params"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+        )
+        query_params.update(self._get_unset_required_fields(query_params))
+
+        query_params["$alt"] = "json;enum-encoding=int"
+
+        # Send the request
+        headers = dict(metadata)
+        headers["Content-Type"] = "application/json"
+        response = getattr(self._session, method)(
+            "{host}{uri}".format(host=self._host, uri=uri),
+            timeout=timeout,
+            headers=headers,
+            params=rest_helpers.flatten_query_params(query_params, strict=True),
+            data=body,
+        )
+
+        # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+        # subclass.
+        if response.status_code >= 400:
+            raise core_exceptions.from_http_response(response)
+
+        # Return the response
+        resp = operations_pb2.Operation()
+        json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+        resp = self._interceptor.post_update_cluster(resp)
+        return resp
+
+    class _UpdateInstance(AlloyDBAdminRestStub):
+        def __hash__(self):
+            return hash("UpdateInstance")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.UpdateInstanceRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the update instance method over HTTP.
+
+            Args:
+                request (~.service.UpdateInstanceRequest):
+                    The request object. Message for updating an Instance
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{instance.name=projects/*/locations/*/clusters/*/instances/*}", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = service.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + @property + def batch_create_instances( + self, + ) -> Callable[[service.BatchCreateInstancesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup( + self, + ) -> Callable[[service.CreateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cluster( + self, + ) -> Callable[[service.CreateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_secondary_cluster( + self, + ) -> Callable[[service.CreateSecondaryClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
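+        # The stub class above implements __call__ with this exact request/response signature, so the cast is sound at runtime.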
+ # In C++ this would require a dynamic_cast + return self._CreateSecondaryCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_secondary_instance( + self, + ) -> Callable[[service.CreateSecondaryInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSecondaryInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup( + self, + ) -> Callable[[service.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cluster( + self, + ) -> Callable[[service.DeleteClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def failover_instance( + self, + ) -> Callable[[service.FailoverInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_client_certificate( + self, + ) -> Callable[ + [service.GenerateClientCertificateRequest], + service.GenerateClientCertificateResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateClientCertificate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup(self) -> Callable[[service.GetBackupRequest], resources.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cluster(self) -> Callable[[service.GetClusterRequest], resources.Cluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection_info( + self, + ) -> Callable[[service.GetConnectionInfoRequest], resources.ConnectionInfo]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetConnectionInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], resources.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[service.ListBackupsRequest], service.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_clusters( + self, + ) -> Callable[[service.ListClustersRequest], service.ListClustersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances( + self, + ) -> Callable[[service.ListInstancesRequest], service.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_supported_database_flags( + self, + ) -> Callable[ + [service.ListSupportedDatabaseFlagsRequest], + service.ListSupportedDatabaseFlagsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSupportedDatabaseFlags(self._session, self._host, self._interceptor) # type: ignore + + @property + def promote_cluster( + self, + ) -> Callable[[service.PromoteClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PromoteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def restart_instance( + self, + ) -> Callable[[service.RestartInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestartInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_cluster( + self, + ) -> Callable[[service.RestoreClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup( + self, + ) -> Callable[[service.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cluster( + self, + ) -> Callable[[service.UpdateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance( + self, + ) -> Callable[[service.UpdateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(AlloyDBAdminRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(AlloyDBAdminRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
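+        # (A successful cancel returns an empty body, so on success there is nothing
+        # to parse and the post-interceptor below is handed None.)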
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
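+
+ Note: the returned Operation is a long-running operation handle;
+ callers typically re-issue GetOperation until ``done`` is true,
+ then read either ``response`` or ``error``.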
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AlloyDBAdminRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AlloyDBAdminRestTransport",) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py new file mode 100644 index 000000000000..b5b70ff1bbed --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resources import ( + AutomatedBackupPolicy, + Backup, + BackupSource, + Cluster, + ConnectionInfo, + ContinuousBackupConfig, + ContinuousBackupInfo, + ContinuousBackupSource, + DatabaseVersion, + EncryptionConfig, + EncryptionInfo, + Instance, + InstanceView, + MigrationSource, + SslConfig, + SupportedDatabaseFlag, + UserPassword, +) +from .service import ( + BatchCreateInstancesMetadata, + BatchCreateInstancesRequest, + BatchCreateInstancesResponse, + BatchCreateInstanceStatus, + CreateBackupRequest, + CreateClusterRequest, + CreateInstanceRequest, + CreateInstanceRequests, + CreateSecondaryClusterRequest, + CreateSecondaryInstanceRequest, + DeleteBackupRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + FailoverInstanceRequest, + GenerateClientCertificateRequest, + GenerateClientCertificateResponse, + GetBackupRequest, + GetClusterRequest, + GetConnectionInfoRequest, + GetInstanceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListClustersRequest, + ListClustersResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSupportedDatabaseFlagsRequest, + ListSupportedDatabaseFlagsResponse, + OperationMetadata, + PromoteClusterRequest, + RestartInstanceRequest, + RestoreClusterRequest, + UpdateBackupRequest, + UpdateClusterRequest, + UpdateInstanceRequest, +) + +__all__ = ( + "AutomatedBackupPolicy", + "Backup", + "BackupSource", + "Cluster", + "ConnectionInfo", + "ContinuousBackupConfig", + "ContinuousBackupInfo", + "ContinuousBackupSource", + "EncryptionConfig", + "EncryptionInfo", + "Instance", + "MigrationSource", + "SslConfig", + "SupportedDatabaseFlag", + "UserPassword", + "DatabaseVersion", + "InstanceView", + "BatchCreateInstancesMetadata", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "BatchCreateInstanceStatus", + "CreateBackupRequest", + "CreateClusterRequest", + "CreateInstanceRequest", + "CreateInstanceRequests", + "CreateSecondaryClusterRequest", + "CreateSecondaryInstanceRequest", + "DeleteBackupRequest", + "DeleteClusterRequest", + "DeleteInstanceRequest", + "FailoverInstanceRequest", + "GenerateClientCertificateRequest", + "GenerateClientCertificateResponse", + "GetBackupRequest", + 
"GetClusterRequest", + "GetConnectionInfoRequest", + "GetInstanceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "OperationMetadata", + "PromoteClusterRequest", + "RestartInstanceRequest", + "RestoreClusterRequest", + "UpdateBackupRequest", + "UpdateClusterRequest", + "UpdateInstanceRequest", +) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py new file mode 100644 index 000000000000..7bb041fff6a7 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -0,0 +1,1742 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1beta", + manifest={ + "DatabaseVersion", + "InstanceView", + "UserPassword", + "MigrationSource", + "EncryptionConfig", + "EncryptionInfo", + "SslConfig", + "AutomatedBackupPolicy", + "ContinuousBackupConfig", + "ContinuousBackupInfo", + "BackupSource", + "ContinuousBackupSource", + "Cluster", + "Instance", + "ConnectionInfo", + "Backup", + "SupportedDatabaseFlag", + }, +) + + +class DatabaseVersion(proto.Enum): + r"""The supported database engine versions. + + Values: + DATABASE_VERSION_UNSPECIFIED (0): + This is an unknown database version. + POSTGRES_13 (1): + DEPRECATED - The database version is Postgres + 13. + POSTGRES_14 (2): + The database version is Postgres 14. + """ + DATABASE_VERSION_UNSPECIFIED = 0 + POSTGRES_13 = 1 + POSTGRES_14 = 2 + + +class InstanceView(proto.Enum): + r"""View on Instance. Pass this enum to rpcs that returns an + Instance message to control which subsets of fields to get. + + Values: + INSTANCE_VIEW_UNSPECIFIED (0): + INSTANCE_VIEW_UNSPECIFIED Not specified, equivalent to + BASIC. + INSTANCE_VIEW_BASIC (1): + BASIC server responses for a primary or read + instance include all the relevant instance + details, excluding the details of each node in + the instance. The default value. + INSTANCE_VIEW_FULL (2): + FULL response is equivalent to BASIC for + primary instance (for now). For read pool + instance, this includes details of each node in + the pool. + """ + INSTANCE_VIEW_UNSPECIFIED = 0 + INSTANCE_VIEW_BASIC = 1 + INSTANCE_VIEW_FULL = 2 + + +class UserPassword(proto.Message): + r"""The username/password for a database user. 
Used for
+ specifying initial users at cluster creation time.
+
+ Attributes:
+ user (str):
+ The database username.
+ password (str):
+ The initial password for the user.
+ """
+
+ user: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ password: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class MigrationSource(proto.Message):
+ r"""Subset of the source instance configuration that is available
+ when reading the cluster resource.
+
+ Attributes:
+ host_port (str):
+ Output only. The host and port of the
+ on-premises instance in host:port format.
+ reference_id (str):
+ Output only. Placeholder for the external
+ source identifier (e.g. DMS job name) that created
+ the cluster.
+ source_type (google.cloud.alloydb_v1beta.types.MigrationSource.MigrationSourceType):
+ Output only. Type of migration source.
+ """
+
+ class MigrationSourceType(proto.Enum):
+ r"""Denotes the type of migration source that created this
+ cluster.
+
+ Values:
+ MIGRATION_SOURCE_TYPE_UNSPECIFIED (0):
+ Migration source is unknown.
+ DMS (1):
+ DMS source means the cluster was created via
+ a DMS migration job.
+ """
+ MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0
+ DMS = 1
+
+ host_port: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ reference_id: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ source_type: MigrationSourceType = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum=MigrationSourceType,
+ )
+
+
+class EncryptionConfig(proto.Message):
+ r"""EncryptionConfig describes the encryption config of a cluster
+ or a backup that is encrypted with a CMEK (customer-managed
+ encryption key).
+
+ Attributes:
+ kms_key_name (str):
+ The fully-qualified resource name of the KMS key. Each Cloud
+ KMS key is regionalized and has the following format:
+ projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME]
+ """
+
+ kms_key_name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class EncryptionInfo(proto.Message):
+ r"""EncryptionInfo describes the encryption information of a
+ cluster or a backup.
+
+ Attributes:
+ encryption_type (google.cloud.alloydb_v1beta.types.EncryptionInfo.Type):
+ Output only. Type of encryption.
+ kms_key_versions (MutableSequence[str]):
+ Output only. Cloud KMS key versions that are
+ being used to protect the database or the
+ backup.
+ """
+
+ class Type(proto.Enum):
+ r"""Possible encryption types.
+
+ Values:
+ TYPE_UNSPECIFIED (0):
+ Encryption type not specified. Defaults to
+ GOOGLE_DEFAULT_ENCRYPTION.
+ GOOGLE_DEFAULT_ENCRYPTION (1):
+ The data is encrypted at rest with a key that
+ is fully managed by Google. No key version will
+ be populated. This is the default state.
+ CUSTOMER_MANAGED_ENCRYPTION (2):
+ The data is encrypted at rest with a key that
+ is managed by the customer. KMS key versions
+ will be populated.
+ """
+ TYPE_UNSPECIFIED = 0
+ GOOGLE_DEFAULT_ENCRYPTION = 1
+ CUSTOMER_MANAGED_ENCRYPTION = 2
+
+ encryption_type: Type = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum=Type,
+ )
+ kms_key_versions: MutableSequence[str] = proto.RepeatedField(
+ proto.STRING,
+ number=2,
+ )
+
+
+class SslConfig(proto.Message):
+ r"""SSL configuration for an AlloyDB Cluster.
+
+ Attributes:
+ ssl_mode (google.cloud.alloydb_v1beta.types.SslConfig.SslMode):
+ Optional. SSL mode. Specifies client-server
+ SSL/TLS connection behavior.
+ ca_source (google.cloud.alloydb_v1beta.types.SslConfig.CaSource):
+ Optional. Certificate Authority (CA) source. Only
+ CA_SOURCE_MANAGED is supported currently, and is the default
+ value.
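+
+ Example (an illustrative sketch; the chosen values are
+ assumptions)::
+
+ from google.cloud import alloydb_v1beta
+
+ ssl_config = alloydb_v1beta.SslConfig(
+ ssl_mode=alloydb_v1beta.SslConfig.SslMode.SSL_MODE_VERIFY_CA,
+ ca_source=alloydb_v1beta.SslConfig.CaSource.CA_SOURCE_MANAGED,
+ )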
+ """ + + class SslMode(proto.Enum): + r"""SSL mode options. + + Values: + SSL_MODE_UNSPECIFIED (0): + SSL mode not specified. Defaults to SSL_MODE_ALLOW. + SSL_MODE_ALLOW (1): + SSL connections are optional. CA verification + not enforced. + SSL_MODE_REQUIRE (2): + SSL connections are required. CA verification + not enforced. Clients may use locally + self-signed certificates (default psql client + behavior). + SSL_MODE_VERIFY_CA (3): + SSL connections are required. CA verification + enforced. Clients must have certificates signed + by a Cluster CA, e.g. via + GenerateClientCertificate. + """ + SSL_MODE_UNSPECIFIED = 0 + SSL_MODE_ALLOW = 1 + SSL_MODE_REQUIRE = 2 + SSL_MODE_VERIFY_CA = 3 + + class CaSource(proto.Enum): + r"""Certificate Authority (CA) source for SSL/TLS certificates. + + Values: + CA_SOURCE_UNSPECIFIED (0): + Certificate Authority (CA) source not specified. Defaults to + CA_SOURCE_MANAGED. + CA_SOURCE_MANAGED (1): + Certificate Authority (CA) managed by the + AlloyDB Cluster. + """ + CA_SOURCE_UNSPECIFIED = 0 + CA_SOURCE_MANAGED = 1 + + ssl_mode: SslMode = proto.Field( + proto.ENUM, + number=1, + enum=SslMode, + ) + ca_source: CaSource = proto.Field( + proto.ENUM, + number=2, + enum=CaSource, + ) + + +class AutomatedBackupPolicy(proto.Message): + r"""Message describing the user-specified automated backup + policy. + All fields in the automated backup policy are optional. Defaults + for each field are provided if they are not set. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + weekly_schedule (google.cloud.alloydb_v1beta.types.AutomatedBackupPolicy.WeeklySchedule): + Weekly schedule for the Backup. + + This field is a member of `oneof`_ ``schedule``. + time_based_retention (google.cloud.alloydb_v1beta.types.AutomatedBackupPolicy.TimeBasedRetention): + Time-based Backup retention policy. + + This field is a member of `oneof`_ ``retention``. + quantity_based_retention (google.cloud.alloydb_v1beta.types.AutomatedBackupPolicy.QuantityBasedRetention): + Quantity-based Backup retention policy to + retain recent backups. + + This field is a member of `oneof`_ ``retention``. + enabled (bool): + Whether automated automated backups are + enabled. If not set, defaults to true. + + This field is a member of `oneof`_ ``_enabled``. + backup_window (google.protobuf.duration_pb2.Duration): + The length of the time window during which a + backup can be taken. If a backup does not + succeed within this time window, it will be + canceled and considered failed. + + The backup window must be at least 5 minutes + long. There is no upper bound on the window. If + not set, it defaults to 1 hour. + encryption_config (google.cloud.alloydb_v1beta.types.EncryptionConfig): + Optional. The encryption config can be + specified to encrypt the backups with a + customer-managed encryption key (CMEK). When + this field is not specified, the backup will + then use default encryption scheme to protect + the user data. + location (str): + The location where the backup will be stored. + Currently, the only supported option is to store + the backup in the same region as the cluster. + If empty, defaults to the region of the cluster. 
+ labels (MutableMapping[str, str]): + Labels to apply to backups created using this + configuration. + """ + + class WeeklySchedule(proto.Message): + r"""A weekly schedule starts a backup at prescribed start times within a + day, for the specified days of the week. + + The weekly schedule message is flexible and can be used to create + many types of schedules. For example, to have a daily backup that + starts at 22:00, configure the ``start_times`` field to have one + element "22:00" and the ``days_of_week`` field to have all seven + days of the week. + + Attributes: + start_times (MutableSequence[google.type.timeofday_pb2.TimeOfDay]): + The times during the day to start a backup. + The start times are assumed to be in UTC and to + be an exact hour (e.g., 04:00:00). + If no start times are provided, a single fixed + start time is chosen arbitrarily. + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + The days of the week to perform a backup. + If this field is left empty, the default of + every day of the week is used. + """ + + start_times: MutableSequence[timeofday_pb2.TimeOfDay] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=timeofday_pb2.TimeOfDay, + ) + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + + class TimeBasedRetention(proto.Message): + r"""A time based retention policy specifies that all backups + within a certain time period should be retained. + + Attributes: + retention_period (google.protobuf.duration_pb2.Duration): + The retention period. + """ + + retention_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + class QuantityBasedRetention(proto.Message): + r"""A quantity based policy specifies that a certain number of + the most recent successful backups should be retained. + + Attributes: + count (int): + The number of backups to retain. + """ + + count: int = proto.Field( + proto.INT32, + number=1, + ) + + weekly_schedule: WeeklySchedule = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message=WeeklySchedule, + ) + time_based_retention: TimeBasedRetention = proto.Field( + proto.MESSAGE, + number=4, + oneof="retention", + message=TimeBasedRetention, + ) + quantity_based_retention: QuantityBasedRetention = proto.Field( + proto.MESSAGE, + number=5, + oneof="retention", + message=QuantityBasedRetention, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + backup_window: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="EncryptionConfig", + ) + location: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + + +class ContinuousBackupConfig(proto.Message): + r"""ContinuousBackupConfig describes the continuous backups + recovery configurations of a cluster. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Whether ContinuousBackup is enabled. + + This field is a member of `oneof`_ ``_enabled``. + recovery_window_days (int): + The number of days backups and logs will be + retained, which determines the window of time + that data is recoverable for. If not set, it + defaults to 14 days. 
+ encryption_config (google.cloud.alloydb_v1beta.types.EncryptionConfig): + The encryption config can be specified to + encrypt the backups with a customer-managed + encryption key (CMEK). When this field is not + specified, the backup will then use default + encryption scheme to protect the user data. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + recovery_window_days: int = proto.Field( + proto.INT32, + number=4, + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="EncryptionConfig", + ) + + +class ContinuousBackupInfo(proto.Message): + r"""ContinuousBackupInfo describes the continuous backup + properties of a cluster. + + Attributes: + encryption_info (google.cloud.alloydb_v1beta.types.EncryptionInfo): + Output only. The encryption information for + the WALs and backups required for + ContinuousBackup. + enabled_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When ContinuousBackup was most + recently enabled. Set to null if + ContinuousBackup is not enabled. + schedule (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Output only. Days of the week on which a + continuous backup is taken. Output only field. + Ignored if passed into the request. + """ + + encryption_info: "EncryptionInfo" = proto.Field( + proto.MESSAGE, + number=1, + message="EncryptionInfo", + ) + enabled_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + schedule: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class BackupSource(proto.Message): + r"""Message describing a BackupSource. + + Attributes: + backup_uid (str): + Output only. The system-generated UID of the + backup which was used to create this resource. + The UID is generated when the backup is created, + and it is retained until the backup is deleted. + backup_name (str): + Required. The name of the backup resource with the format: + + - projects/{project}/locations/{region}/backups/{backup_id} + """ + + backup_uid: str = proto.Field( + proto.STRING, + number=2, + ) + backup_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ContinuousBackupSource(proto.Message): + r"""Message describing a ContinuousBackupSource. + + Attributes: + cluster (str): + Required. The source cluster from which to + restore. This cluster must have continuous + backup enabled for this operation to succeed. + For the required format, see the comment on the + Cluster.name field. + point_in_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The point in time to restore to. + """ + + cluster: str = proto.Field( + proto.STRING, + number=1, + ) + point_in_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class Cluster(proto.Message): + r"""A cluster is a collection of regional AlloyDB resources. It + can include a primary instance and one or more read pool + instances. All cluster resources share a storage layer, which + scales as needed. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ backup_source (google.cloud.alloydb_v1beta.types.BackupSource):
+ Output only. Cluster created from backup.
+
+ This field is a member of `oneof`_ ``source``.
+ migration_source (google.cloud.alloydb_v1beta.types.MigrationSource):
+ Output only. Cluster created via DMS
+ migration.
+
+ This field is a member of `oneof`_ ``source``.
+ name (str):
+ Output only. The name of the cluster resource with the
+ format:
+
+ - projects/{project}/locations/{region}/clusters/{cluster_id}
+ where the cluster ID segment should satisfy the regex
+ expression ``[a-z0-9-]+``. For more details see
+ https://google.aip.dev/122. The prefix of the cluster
+ resource name is the name of the parent resource:
+ - projects/{project}/locations/{region}
+ display_name (str):
+ User-settable and human-readable display name
+ for the Cluster.
+ uid (str):
+ Output only. The system-generated UID of the
+ resource. The UID is assigned when the resource
+ is created, and it is retained until it is
+ deleted.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Create time stamp
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Update time stamp
+ delete_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Delete time stamp
+ labels (MutableMapping[str, str]):
+ Labels as key value pairs
+ state (google.cloud.alloydb_v1beta.types.Cluster.State):
+ Output only. The current serving state of the
+ cluster.
+ cluster_type (google.cloud.alloydb_v1beta.types.Cluster.ClusterType):
+ Output only. The type of the cluster. This is an output-only
+ field and it's populated at the Cluster creation time or the
+ Cluster promotion time. The cluster type is determined by
+ which RPC was used to create the cluster (i.e.
+ ``CreateCluster`` vs. ``CreateSecondaryCluster``).
+ database_version (google.cloud.alloydb_v1beta.types.DatabaseVersion):
+ Output only. The database engine major
+ version. This is an output-only field and it's
+ populated at the Cluster creation time. This
+ field cannot be changed after cluster creation.
+ network (str):
+ Required. The resource link for the VPC network in which
+ cluster resources are created and from which they are
+ accessible via Private IP. The network must belong to the
+ same project as the cluster. It is specified in the form:
+ "projects/{project_number}/global/networks/{network_id}".
+ This is required to create a cluster. It can be updated, but
+ it cannot be removed.
+ etag (str):
+ For Resource freshness validation
+ (https://google.aip.dev/154)
+ annotations (MutableMapping[str, str]):
+ Annotations to allow client tools to store
+ small amount of arbitrary data. This is distinct
+ from labels. https://google.aip.dev/128
+ reconciling (bool):
+ Output only. Reconciling
+ (https://google.aip.dev/128#reconciliation). Set
+ to true if the current state of Cluster does not
+ match the user's intended state, and the service
+ is actively updating the resource to reconcile
+ them. This can happen due to user-triggered
+ updates or system actions like failover or
+ maintenance.
+ initial_user (google.cloud.alloydb_v1beta.types.UserPassword):
+ Input only. Initial user to set up during cluster creation.
+ Required. If used in ``RestoreCluster`` this is ignored.
+ automated_backup_policy (google.cloud.alloydb_v1beta.types.AutomatedBackupPolicy):
+ The automated backup policy for this cluster.
+ If no policy is provided then the default policy
+ will be used. If backups are supported for the
+ cluster, the default policy takes one backup a
+ day, has a backup window of 1 hour, and retains
+ backups for 14 days. For more information on the
+ defaults, consult the documentation for the
+ message type.
+ ssl_config (google.cloud.alloydb_v1beta.types.SslConfig):
+ SSL configuration for this AlloyDB Cluster.
+ encryption_config (google.cloud.alloydb_v1beta.types.EncryptionConfig):
+ Optional. The encryption config can be
+ specified to encrypt the data disks and other
+ persistent data resources of a cluster with a
+ customer-managed encryption key (CMEK). When
+ this field is not specified, the cluster will
+ then use default encryption scheme to protect
+ the user data.
+ encryption_info (google.cloud.alloydb_v1beta.types.EncryptionInfo):
+ Output only. The encryption information for
+ the cluster.
+ continuous_backup_config (google.cloud.alloydb_v1beta.types.ContinuousBackupConfig):
+ Optional. Continuous backup configuration for
+ this cluster.
+ continuous_backup_info (google.cloud.alloydb_v1beta.types.ContinuousBackupInfo):
+ Output only. Continuous backup properties for
+ this cluster.
+ secondary_config (google.cloud.alloydb_v1beta.types.Cluster.SecondaryConfig):
+ Cross Region replication config specific to
+ SECONDARY cluster.
+ primary_config (google.cloud.alloydb_v1beta.types.Cluster.PrimaryConfig):
+ Output only. Cross Region replication config
+ specific to PRIMARY cluster.
+ """
+
+ class State(proto.Enum):
+ r"""Cluster State
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ The state of the cluster is unknown.
+ READY (1):
+ The cluster is active and running.
+ STOPPED (2):
+ The cluster is stopped. All instances in the
+ cluster are stopped. Customers can start a
+ stopped cluster at any point and all their
+ instances will come back to life with the same
+ names and IP resources. In this state, the
+ customer pays for storage.
+ Associated backups could also be present in a
+ stopped cluster.
+ EMPTY (3):
+ The cluster is empty and has no associated
+ resources. All instances, associated storage and
+ backups have been deleted.
+ CREATING (4):
+ The cluster is being created.
+ DELETING (5):
+ The cluster is being deleted.
+ FAILED (6):
+ The creation of the cluster failed.
+ BOOTSTRAPPING (7):
+ The cluster is bootstrapping with data from
+ some other source. Direct mutations to the
+ cluster (e.g. adding read pool) are not allowed.
+ MAINTENANCE (8):
+ The cluster is under maintenance. AlloyDB
+ regularly performs maintenance and upgrades on
+ customer clusters. Updates on the cluster are
+ not allowed while the cluster is in this state.
+ PROMOTING (9):
+ The cluster is being promoted.
+ """
+ STATE_UNSPECIFIED = 0
+ READY = 1
+ STOPPED = 2
+ EMPTY = 3
+ CREATING = 4
+ DELETING = 5
+ FAILED = 6
+ BOOTSTRAPPING = 7
+ MAINTENANCE = 8
+ PROMOTING = 9
+
+ class ClusterType(proto.Enum):
+ r"""Type of Cluster
+
+ Values:
+ CLUSTER_TYPE_UNSPECIFIED (0):
+ The type of the cluster is unknown.
+ PRIMARY (1):
+ Primary cluster that supports read and write
+ operations.
+ SECONDARY (2):
+ Secondary cluster that is replicating from
+ another region. This only supports reads.
+ """
+ CLUSTER_TYPE_UNSPECIFIED = 0
+ PRIMARY = 1
+ SECONDARY = 2
+
+ class SecondaryConfig(proto.Message):
+ r"""Configuration information for the secondary cluster. This
+ should be set if and only if the cluster is of type SECONDARY.
+ + Attributes: + primary_cluster_name (str): + The name of the primary cluster name with the format: + + - projects/{project}/locations/{region}/clusters/{cluster_id} + """ + + primary_cluster_name: str = proto.Field( + proto.STRING, + number=1, + ) + + class PrimaryConfig(proto.Message): + r"""Configuration for the primary cluster. It has the list of + clusters that are replicating from this cluster. This should be + set if and only if the cluster is of type PRIMARY. + + Attributes: + secondary_cluster_names (MutableSequence[str]): + Output only. Names of the clusters that are + replicating from this cluster. + """ + + secondary_cluster_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + backup_source: "BackupSource" = proto.Field( + proto.MESSAGE, + number=15, + oneof="source", + message="BackupSource", + ) + migration_source: "MigrationSource" = proto.Field( + proto.MESSAGE, + number=16, + oneof="source", + message="MigrationSource", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + cluster_type: ClusterType = proto.Field( + proto.ENUM, + number=24, + enum=ClusterType, + ) + database_version: "DatabaseVersion" = proto.Field( + proto.ENUM, + number=9, + enum="DatabaseVersion", + ) + network: str = proto.Field( + proto.STRING, + number=10, + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=12, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=13, + ) + initial_user: "UserPassword" = proto.Field( + proto.MESSAGE, + number=14, + message="UserPassword", + ) + automated_backup_policy: "AutomatedBackupPolicy" = proto.Field( + proto.MESSAGE, + number=17, + message="AutomatedBackupPolicy", + ) + ssl_config: "SslConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="SslConfig", + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=19, + message="EncryptionConfig", + ) + encryption_info: "EncryptionInfo" = proto.Field( + proto.MESSAGE, + number=20, + message="EncryptionInfo", + ) + continuous_backup_config: "ContinuousBackupConfig" = proto.Field( + proto.MESSAGE, + number=27, + message="ContinuousBackupConfig", + ) + continuous_backup_info: "ContinuousBackupInfo" = proto.Field( + proto.MESSAGE, + number=28, + message="ContinuousBackupInfo", + ) + secondary_config: SecondaryConfig = proto.Field( + proto.MESSAGE, + number=22, + message=SecondaryConfig, + ) + primary_config: PrimaryConfig = proto.Field( + proto.MESSAGE, + number=23, + message=PrimaryConfig, + ) + + +class Instance(proto.Message): + r"""An Instance is a computing unit that an end customer can + connect to. It's the main unit of computing resources in + AlloyDB. + + Attributes: + name (str): + Output only. 
The name of the instance resource with the + format: + + - projects/{project}/locations/{region}/clusters/{cluster_id}/instances/{instance_id} + where the cluster and instance ID segments should satisfy + the regex expression ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``, + e.g. 1-63 characters of lowercase letters, numbers, and + dashes, starting with a letter, and ending with a letter + or number. For more details see + https://google.aip.dev/122. The prefix of the instance + resource name is the name of the parent resource: + - projects/{project}/locations/{region}/clusters/{cluster_id} + display_name (str): + User-settable and human-readable display name + for the Instance. + uid (str): + Output only. The system-generated UID of the + resource. The UID is assigned when the resource + is created, and it is retained until it is + deleted. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Delete time stamp + labels (MutableMapping[str, str]): + Labels as key value pairs + state (google.cloud.alloydb_v1beta.types.Instance.State): + Output only. The current serving state of the + instance. + instance_type (google.cloud.alloydb_v1beta.types.Instance.InstanceType): + Required. The type of the instance. Specified + at creation time. + machine_config (google.cloud.alloydb_v1beta.types.Instance.MachineConfig): + Configurations for the machines that host the + underlying database engine. + availability_type (google.cloud.alloydb_v1beta.types.Instance.AvailabilityType): + Availability type of an Instance. + Defaults to REGIONAL for both primary and read + instances. Note that primary and read instances + can have different availability types. + gce_zone (str): + The Compute Engine zone that the instance + should serve from, per + https://cloud.google.com/compute/docs/regions-zones + This can ONLY be specified for ZONAL instances. + If present for a REGIONAL instance, an error + will be thrown. If this is absent for a ZONAL + instance, instance is created in a random zone + with available capacity. + database_flags (MutableMapping[str, str]): + Database flags. Set at instance level. + + - They are copied from primary instance on read instance + creation. + - Read instances can set new or override existing flags + that are relevant for reads, e.g. for enabling columnar + cache on a read instance. Flags set on read instance may + or may not be present on primary. + + This is a list of "key": "value" pairs. "key": The name of + the flag. These flags are passed at instance setup time, so + include both server options and system variables for + Postgres. Flags are specified with underscores, not hyphens. + "value": The value of the flag. Booleans are set to **on** + for true and **off** for false. This field must be omitted + if the flag doesn't take a value. + writable_node (google.cloud.alloydb_v1beta.types.Instance.Node): + Output only. This is set for the read-write + VM of the PRIMARY instance only. + nodes (MutableSequence[google.cloud.alloydb_v1beta.types.Instance.Node]): + Output only. List of available read-only VMs + in this instance, including the standby for a + PRIMARY instance. + query_insights_config (google.cloud.alloydb_v1beta.types.Instance.QueryInsightsInstanceConfig): + Configuration for query insights. 
+ read_pool_config (google.cloud.alloydb_v1beta.types.Instance.ReadPoolConfig):
+ Read pool specific config.
+ ip_address (str):
+ Output only. The IP address for the Instance.
+ This is the connection endpoint for an end-user
+ application.
+ reconciling (bool):
+ Output only. Reconciling
+ (https://google.aip.dev/128#reconciliation). Set
+ to true if the current state of Instance does
+ not match the user's intended state, and the
+ service is actively updating the resource to
+ reconcile them. This can happen due to
+ user-triggered updates or system actions like
+ failover or maintenance.
+ etag (str):
+ For Resource freshness validation
+ (https://google.aip.dev/154)
+ annotations (MutableMapping[str, str]):
+ Annotations to allow client tools to store
+ small amount of arbitrary data. This is distinct
+ from labels. https://google.aip.dev/128
+ """
+
+ class State(proto.Enum):
+ r"""Instance State
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ The state of the instance is unknown.
+ READY (1):
+ The instance is active and running.
+ STOPPED (2):
+ The instance is stopped. Instance name and IP
+ resources are preserved.
+ CREATING (3):
+ The instance is being created.
+ DELETING (4):
+ The instance is being deleted.
+ MAINTENANCE (5):
+ The instance is down for maintenance.
+ FAILED (6):
+ The creation of the instance failed or a
+ fatal error occurred during an operation on the
+ instance. Note: Instances in this state will be
+ auto-repaired where possible, and customers can
+ still restart, update or delete these instances.
+ BOOTSTRAPPING (8):
+ Index 7 is used in the producer APIs for the ROLLED_BACK
+ state. That index is kept unused in case the state also
+ needs to be exposed via consumer APIs in the future.
+ The instance has been configured to sync data from
+ some other source.
+ PROMOTING (9):
+ The instance is being promoted.
+ """
+ STATE_UNSPECIFIED = 0
+ READY = 1
+ STOPPED = 2
+ CREATING = 3
+ DELETING = 4
+ MAINTENANCE = 5
+ FAILED = 6
+ BOOTSTRAPPING = 8
+ PROMOTING = 9
+
+ class InstanceType(proto.Enum):
+ r"""Type of an Instance
+
+ Values:
+ INSTANCE_TYPE_UNSPECIFIED (0):
+ The type of the instance is unknown.
+ PRIMARY (1):
+ PRIMARY instances support read and write
+ operations.
+ READ_POOL (2):
+ READ POOL instances support read operations only. Each read
+ pool instance consists of one or more homogeneous nodes.
+
+ - Read pool of size 1 can only have zonal availability.
+ - Read pools with node count of 2 or more can have regional
+ availability (nodes are present in 2 or more zones in a
+ region).
+ SECONDARY (3):
+ SECONDARY instances support read operations
+ only. A SECONDARY instance is a cross-region
+ read replica.
+ """
+ INSTANCE_TYPE_UNSPECIFIED = 0
+ PRIMARY = 1
+ READ_POOL = 2
+ SECONDARY = 3
+
+ class AvailabilityType(proto.Enum):
+ r"""The Availability type of an instance. Potential values:
+ - ZONAL: The instance serves data from only one zone. Outages in
+ that zone affect instance availability.
+ - REGIONAL: The instance can serve data from more than one zone
+ in a region (it is highly available).
+
+ Values:
+ AVAILABILITY_TYPE_UNSPECIFIED (0):
+ This is an unknown Availability type.
+ ZONAL (1):
+ Zonal available instance.
+ REGIONAL (2):
+ Regional (highly available) instance.
+ """
+ AVAILABILITY_TYPE_UNSPECIFIED = 0
+ ZONAL = 1
+ REGIONAL = 2
+
+ class MachineConfig(proto.Message):
+ r"""MachineConfig describes the configuration of a machine.
+
+ Attributes:
+ cpu_count (int):
+ The number of CPUs in the VM instance.
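+
+ A minimal sketch (the cpu_count value is illustrative)::
+
+ machine_config = alloydb_v1beta.Instance.MachineConfig(cpu_count=4)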
+ """ + + cpu_count: int = proto.Field( + proto.INT32, + number=1, + ) + + class Node(proto.Message): + r"""Details of a single node in the instance. + Nodes in an AlloyDB instance are ephemereal, they can change + during update, failover, autohealing and resize operations. + + Attributes: + zone_id (str): + The Compute Engine zone of the VM e.g. + "us-central1-b". + id (str): + The identifier of the VM e.g. + "test-read-0601-407e52be-ms3l". + ip (str): + The private IP address of the VM e.g. + "10.57.0.34". + state (str): + Determined by state of the compute VM and + postgres-service health. Compute VM state can + have values listed in + https://cloud.google.com/compute/docs/instances/instance-life-cycle + and postgres-service health can have values: + HEALTHY and UNHEALTHY. + """ + + zone_id: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + ip: str = proto.Field( + proto.STRING, + number=3, + ) + state: str = proto.Field( + proto.STRING, + number=4, + ) + + class QueryInsightsInstanceConfig(proto.Message): + r"""QueryInsights Instance specific configuration. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + record_application_tags (bool): + Record application tags for an instance. + This flag is turned "on" by default. + + This field is a member of `oneof`_ ``_record_application_tags``. + record_client_address (bool): + Record client address for an instance. Client + address is PII information. This flag is turned + "on" by default. + + This field is a member of `oneof`_ ``_record_client_address``. + query_string_length (int): + Query string length. The default value is + 1024. Any integer between 256 and 4500 is + considered valid. + query_plans_per_minute (int): + Number of query execution plans captured by + Insights per minute for all queries combined. + The default value is 5. Any integer between 0 + and 20 is considered valid. + + This field is a member of `oneof`_ ``_query_plans_per_minute``. + """ + + record_application_tags: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + record_client_address: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + query_string_length: int = proto.Field( + proto.UINT32, + number=4, + ) + query_plans_per_minute: int = proto.Field( + proto.UINT32, + number=5, + optional=True, + ) + + class ReadPoolConfig(proto.Message): + r"""Configuration for a read pool instance. + + Attributes: + node_count (int): + Read capacity, i.e. number of nodes in a read + pool instance. 
+ """ + + node_count: int = proto.Field( + proto.INT32, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + instance_type: InstanceType = proto.Field( + proto.ENUM, + number=9, + enum=InstanceType, + ) + machine_config: MachineConfig = proto.Field( + proto.MESSAGE, + number=10, + message=MachineConfig, + ) + availability_type: AvailabilityType = proto.Field( + proto.ENUM, + number=11, + enum=AvailabilityType, + ) + gce_zone: str = proto.Field( + proto.STRING, + number=12, + ) + database_flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + writable_node: Node = proto.Field( + proto.MESSAGE, + number=19, + message=Node, + ) + nodes: MutableSequence[Node] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=Node, + ) + query_insights_config: QueryInsightsInstanceConfig = proto.Field( + proto.MESSAGE, + number=21, + message=QueryInsightsInstanceConfig, + ) + read_pool_config: ReadPoolConfig = proto.Field( + proto.MESSAGE, + number=14, + message=ReadPoolConfig, + ) + ip_address: str = proto.Field( + proto.STRING, + number=15, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=16, + ) + etag: str = proto.Field( + proto.STRING, + number=17, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + + +class ConnectionInfo(proto.Message): + r"""ConnectionInfo singleton resource. + https://google.aip.dev/156 + + Attributes: + name (str): + The name of the ConnectionInfo singleton resource, e.g.: + projects/{project}/locations/{location}/clusters/\ */instances/*/connectionInfo + This field currently has no semantic meaning. + ip_address (str): + Output only. The IP address for the Instance. + This is the connection endpoint for an end-user + application. + pem_certificate_chain (MutableSequence[str]): + Output only. The pem-encoded chain that may + be used to verify the X.509 certificate. + Expected to be in issuer-to-root order according + to RFC 5246. + instance_uid (str): + Output only. The unique ID of the Instance. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ip_address: str = proto.Field( + proto.STRING, + number=2, + ) + pem_certificate_chain: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + instance_uid: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Backup(proto.Message): + r"""Message describing Backup object + + Attributes: + name (str): + Output only. The name of the backup resource with the + format: + + - projects/{project}/locations/{region}/backups/{backup_id} + where the cluster and backup ID segments should satisfy + the regex expression ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``, + e.g. 1-63 characters of lowercase letters, numbers, and + dashes, starting with a letter, and ending with a letter + or number. 
For more details see
+ https://google.aip.dev/122. The prefix of the backup
+ resource name is the name of the parent resource:
+ - projects/{project}/locations/{region}
+ display_name (str):
+ User-settable and human-readable display name
+ for the Backup.
+ uid (str):
+ Output only. The system-generated UID of the
+ resource. The UID is assigned when the resource
+ is created, and it is retained until it is
+ deleted.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Create time stamp
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Update time stamp
+ delete_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Delete time stamp
+ labels (MutableMapping[str, str]):
+ Labels as key value pairs
+ state (google.cloud.alloydb_v1beta.types.Backup.State):
+ Output only. The current state of the backup.
+ type_ (google.cloud.alloydb_v1beta.types.Backup.Type):
+ The backup type, which suggests the trigger
+ for the backup.
+ description (str):
+ User-provided description of the backup.
+ cluster_uid (str):
+ Output only. The system-generated UID of the
+ cluster which was used to create this resource.
+ cluster_name (str):
+ Required. The full resource name of the backup source
+ cluster (e.g., projects//locations//clusters/).
+ reconciling (bool):
+ Output only. Reconciling
+ (https://google.aip.dev/128#reconciliation), if
+ true, indicates that the service is actively
+ updating the resource. This can happen due to
+ user-triggered updates or system actions like
+ failover or maintenance.
+ encryption_config (google.cloud.alloydb_v1beta.types.EncryptionConfig):
+ Optional. The encryption config can be
+ specified to encrypt the backup with a
+ customer-managed encryption key (CMEK). When
+ this field is not specified, the backup will
+ then use default encryption scheme to protect
+ the user data.
+ encryption_info (google.cloud.alloydb_v1beta.types.EncryptionInfo):
+ Output only. The encryption information for
+ the backup.
+ etag (str):
+ For Resource freshness validation
+ (https://google.aip.dev/154)
+ annotations (MutableMapping[str, str]):
+ Annotations to allow client tools to store
+ small amount of arbitrary data. This is distinct
+ from labels. https://google.aip.dev/128
+ size_bytes (int):
+ Output only. The size of the backup in bytes.
+ expiry_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The time after which the backup is
+ eligible to be garbage collected. It is the duration
+ specified by the backup's retention policy, added to
+ the backup's create_time.
+ """
+
+ class State(proto.Enum):
+ r"""Backup State
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ The state of the backup is unknown.
+ READY (1):
+ The backup is ready.
+ CREATING (2):
+ The backup is being created.
+ FAILED (3):
+ The backup failed.
+ DELETING (4):
+ The backup is being deleted.
+ """
+ STATE_UNSPECIFIED = 0
+ READY = 1
+ CREATING = 2
+ FAILED = 3
+ DELETING = 4
+
+ class Type(proto.Enum):
+ r"""Backup Type
+
+ Values:
+ TYPE_UNSPECIFIED (0):
+ Backup Type is unknown.
+ ON_DEMAND (1):
+ ON_DEMAND backups that were triggered by the customer (e.g.,
+ not AUTOMATED).
+ AUTOMATED (2):
+ AUTOMATED backups triggered by the automated
+ backups scheduler pursuant to an automated
+ backup policy.
+ CONTINUOUS (3):
+ CONTINUOUS backups triggered by the automated
+ backups scheduler due to a continuous backup
+ policy.
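+
+ Note: client code can branch on this enum through the ``type_``
+ field (the trailing underscore avoids shadowing the built-in
+ ``type``), e.g. ``backup.type_ == Backup.Type.AUTOMATED``.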
+ """ + TYPE_UNSPECIFIED = 0 + ON_DEMAND = 1 + AUTOMATED = 2 + CONTINUOUS = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=15, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + type_: Type = proto.Field( + proto.ENUM, + number=8, + enum=Type, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + cluster_uid: str = proto.Field( + proto.STRING, + number=18, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=10, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=11, + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=12, + message="EncryptionConfig", + ) + encryption_info: "EncryptionInfo" = proto.Field( + proto.MESSAGE, + number=13, + message="EncryptionInfo", + ) + etag: str = proto.Field( + proto.STRING, + number=14, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=17, + ) + expiry_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=19, + message=timestamp_pb2.Timestamp, + ) + + +class SupportedDatabaseFlag(proto.Message): + r"""SupportedDatabaseFlag gives general information about a database + flag, like type and allowed values. This is a static value that is + defined on the server side, and it cannot be modified by callers. To + set the Database flags on a particular Instance, a caller should + modify the Instance.database_flags field. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_restrictions (google.cloud.alloydb_v1beta.types.SupportedDatabaseFlag.StringRestrictions): + Restriction on STRING type value. + + This field is a member of `oneof`_ ``restrictions``. + integer_restrictions (google.cloud.alloydb_v1beta.types.SupportedDatabaseFlag.IntegerRestrictions): + Restriction on INTEGER type value. + + This field is a member of `oneof`_ ``restrictions``. + name (str): + The name of the flag resource, following Google Cloud + conventions, e.g.: + + - projects/{project}/locations/{location}/flags/{flag} This + field currently has no semantic meaning. + flag_name (str): + The name of the database flag, e.g. "max_allowed_packets". + The is a possibly key for the Instance.database_flags map + field. + value_type (google.cloud.alloydb_v1beta.types.SupportedDatabaseFlag.ValueType): + + accepts_multiple_values (bool): + Whether the database flag accepts multiple + values. If true, a comma-separated list of + stringified values may be specified. 
+ supported_db_versions (MutableSequence[google.cloud.alloydb_v1beta.types.DatabaseVersion]): + Major database engine versions for which this + flag is supported. + requires_db_restart (bool): + Whether setting or updating this flag on an + Instance requires a database restart. If a flag + that requires database restart is set, the + backend will automatically restart the database + (making sure to satisfy any availability SLOs). + """ + + class ValueType(proto.Enum): + r"""ValueType describes the semantic type of the value that the flag + accepts. Regardless of the ValueType, the Instance.database_flags + field accepts the stringified version of the value, i.e. "20" or + "3.14". + + Values: + VALUE_TYPE_UNSPECIFIED (0): + This is an unknown flag type. + STRING (1): + String type flag. + INTEGER (2): + Integer type flag. + FLOAT (3): + Float type flag. + NONE (4): + Denotes that the flag does not accept any + values. + """ + VALUE_TYPE_UNSPECIFIED = 0 + STRING = 1 + INTEGER = 2 + FLOAT = 3 + NONE = 4 + + class StringRestrictions(proto.Message): + r"""Restrictions on STRING type values + + Attributes: + allowed_values (MutableSequence[str]): + The list of allowed values, if bounded. This + field will be empty if there is an unbounded + number of allowed values. + """ + + allowed_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class IntegerRestrictions(proto.Message): + r"""Restrictions on INTEGER type values. + + Attributes: + min_value (google.protobuf.wrappers_pb2.Int64Value): + The minimum value that can be specified, if + applicable. + max_value (google.protobuf.wrappers_pb2.Int64Value): + The maximum value that can be specified, if + applicable. + """ + + min_value: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.Int64Value, + ) + max_value: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=2, + message=wrappers_pb2.Int64Value, + ) + + string_restrictions: StringRestrictions = proto.Field( + proto.MESSAGE, + number=7, + oneof="restrictions", + message=StringRestrictions, + ) + integer_restrictions: IntegerRestrictions = proto.Field( + proto.MESSAGE, + number=8, + oneof="restrictions", + message=IntegerRestrictions, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + flag_name: str = proto.Field( + proto.STRING, + number=2, + ) + value_type: ValueType = proto.Field( + proto.ENUM, + number=3, + enum=ValueType, + ) + accepts_multiple_values: bool = proto.Field( + proto.BOOL, + number=4, + ) + supported_db_versions: MutableSequence["DatabaseVersion"] = proto.RepeatedField( + proto.ENUM, + number=5, + enum="DatabaseVersion", + ) + requires_db_restart: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py new file mode 100644 index 000000000000..a93eeaaa5489 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py @@ -0,0 +1,1734 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.alloydb_v1beta.types import resources + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1beta", + manifest={ + "ListClustersRequest", + "ListClustersResponse", + "GetClusterRequest", + "CreateSecondaryClusterRequest", + "CreateClusterRequest", + "UpdateClusterRequest", + "DeleteClusterRequest", + "PromoteClusterRequest", + "RestoreClusterRequest", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "CreateSecondaryInstanceRequest", + "CreateInstanceRequests", + "BatchCreateInstancesRequest", + "BatchCreateInstancesResponse", + "BatchCreateInstancesMetadata", + "BatchCreateInstanceStatus", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "FailoverInstanceRequest", + "RestartInstanceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "CreateBackupRequest", + "UpdateBackupRequest", + "DeleteBackupRequest", + "ListSupportedDatabaseFlagsRequest", + "ListSupportedDatabaseFlagsResponse", + "GenerateClientCertificateRequest", + "GenerateClientCertificateResponse", + "GetConnectionInfoRequest", + "OperationMetadata", + }, +) + + +class ListClustersRequest(proto.Message): + r"""Message for requesting list of Clusters + + Attributes: + parent (str): + Required. The name of the parent resource. For the required + format, see the comment on the Cluster.name field. + Additionally, you can perform an aggregated list operation + by specifying a value with the following format: + + - projects/{project}/locations/- + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListClustersResponse(proto.Message): + r"""Message for response to listing Clusters + + Attributes: + clusters (MutableSequence[google.cloud.alloydb_v1beta.types.Cluster]): + The list of Cluster + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
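ListClustersResponse is consumed through the generated pager, which follows next_page_token transparently; raw_page exists so callers can reach per-page fields such as unreachable. A minimal sketch, assuming default credentials and the generated AlloyDBAdminClient; the project ID is hypothetical:

    from google.cloud import alloydb_v1beta

    client = alloydb_v1beta.AlloyDBAdminClient()
    request = alloydb_v1beta.ListClustersRequest(
        parent="projects/my-project/locations/-",  # aggregated list across regions
        page_size=50,
    )
    for cluster in client.list_clusters(request=request):
        print(cluster.name)  # the pager fetches follow-up pages on demand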
+ """ + + @property + def raw_page(self): + return self + + clusters: MutableSequence[resources.Cluster] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Cluster, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetClusterRequest(proto.Message): + r"""Message for getting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSecondaryClusterRequest(proto.Message): + r""" + + Attributes: + parent (str): + Required. The name of the parent resource + (the primary cluster). For the required format, + see the comment on the Cluster.name field. + cluster_id (str): + Required. ID of the requesting object (the + secondary cluster). + cluster (google.cloud.alloydb_v1beta.types.Cluster): + Required. Configuration of the requesting + object (the secondary cluster). + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class CreateClusterRequest(proto.Message): + r"""Message for creating a Cluster + + Attributes: + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Cluster.name field. + cluster_id (str): + Required. ID of the requesting object. + cluster (google.cloud.alloydb_v1beta.types.Cluster): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateClusterRequest(proto.Message): + r"""Message for updating a Cluster + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Cluster resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + cluster (google.cloud.alloydb_v1beta.types.Cluster): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + update request. + allow_missing (bool): + Optional. If set to true, update succeeds even if cluster is + not found. In that case, a new cluster is created and + ``update_mask`` is ignored. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteClusterRequest(proto.Message): + r"""Message for deleting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Cluster. + If an etag is provided and does not match the + current etag of the Cluster, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + force (bool): + Optional. Whether to cascade delete child + instances for given cluster. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class PromoteClusterRequest(proto.Message): + r"""Message for promoting a Cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Cluster. + If an etag is provided and does not match the + current etag of the Cluster, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class RestoreClusterRequest(proto.Message): + r"""Message for restoring a Cluster from a backup or another + cluster at a given point in time. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_source (google.cloud.alloydb_v1beta.types.BackupSource): + Backup source. + + This field is a member of `oneof`_ ``source``. 
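backup_source and continuous_backup_source form the ``source`` oneof described above, so proto-plus clears one member when the other is assigned. A minimal sketch of that behavior; the backup name is hypothetical:

    from google.cloud.alloydb_v1beta import types

    request = types.RestoreClusterRequest(
        backup_source=types.BackupSource(
            backup_name="projects/my-project/locations/us-central1/backups/b1",
        ),
    )
    # Assigning the other oneof member clears backup_source.
    request.continuous_backup_source = types.ContinuousBackupSource()
    assert not request.backup_source.backup_name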
+ continuous_backup_source (google.cloud.alloydb_v1beta.types.ContinuousBackupSource): + ContinuousBackup source. Continuous backup + needs to be enabled in the source cluster for + this operation to succeed. + + This field is a member of `oneof`_ ``source``. + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Cluster.name field. + cluster_id (str): + Required. ID of the requesting object. + cluster (google.cloud.alloydb_v1beta.types.Cluster): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + import request. + """ + + backup_source: resources.BackupSource = proto.Field( + proto.MESSAGE, + number=4, + oneof="source", + message=resources.BackupSource, + ) + continuous_backup_source: resources.ContinuousBackupSource = proto.Field( + proto.MESSAGE, + number=8, + oneof="source", + message=resources.ContinuousBackupSource, + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: resources.Cluster = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Cluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListInstancesRequest(proto.Message): + r"""Message for requesting list of Instances + + Attributes: + parent (str): + Required. The name of the parent resource. For the required + format, see the comment on the Instance.name field. + Additionally, you can perform an aggregated list operation + by specifying a value with one of the following formats: + + - projects/{project}/locations/-/clusters/- + - projects/{project}/locations/{region}/clusters/- + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. Filtering results + order_by (str): + Optional. 
Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInstancesResponse(proto.Message): + r"""Message for response to listing Instances + + Attributes: + instances (MutableSequence[google.cloud.alloydb_v1beta.types.Instance]): + The list of Instance + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances: MutableSequence[resources.Instance] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Instance, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Message for getting a Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + view (google.cloud.alloydb_v1beta.types.InstanceView): + The view of the instance to return. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: resources.InstanceView = proto.Field( + proto.ENUM, + number=2, + enum=resources.InstanceView, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Message for creating a Instance + + Attributes: + parent (str): + Required. The name of the parent resource. + For the required format, see the comment on the + Instance.name field. + instance_id (str): + Required. ID of the requesting object. + instance (google.cloud.alloydb_v1beta.types.Instance): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CreateSecondaryInstanceRequest(proto.Message): + r"""Message for creating a Secondary Instance + + Attributes: + parent (str): + Required. The name of the parent resource. 
+ For the required format, see the comment on the + Instance.name field. + instance_id (str): + Required. ID of the requesting object. + instance (google.cloud.alloydb_v1beta.types.Instance): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + create request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CreateInstanceRequests(proto.Message): + r"""See usage below for notes. + + Attributes: + create_instance_requests (MutableSequence[google.cloud.alloydb_v1beta.types.CreateInstanceRequest]): + Required. Primary and read replica instances + to be created. This list should not be empty. + """ + + create_instance_requests: MutableSequence[ + "CreateInstanceRequest" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CreateInstanceRequest", + ) + + +class BatchCreateInstancesRequest(proto.Message): + r"""Message for creating a batch of instances under the specified + cluster. + + Attributes: + parent (str): + Required. The name of the parent resource. + requests (google.cloud.alloydb_v1beta.types.CreateInstanceRequests): + Required. Resources being created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
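The request_id contract described above enables safe retries: generate one UUID per logical request and resend it unchanged if the first attempt times out. A minimal sketch, assuming the generated AlloyDBAdminClient; all resource names are hypothetical:

    import uuid

    from google.cloud import alloydb_v1beta

    client = alloydb_v1beta.AlloyDBAdminClient()
    parent = "projects/my-project/locations/us-central1/clusters/my-cluster"
    request = alloydb_v1beta.BatchCreateInstancesRequest(
        parent=parent,
        requests=alloydb_v1beta.CreateInstanceRequests(
            create_instance_requests=[
                alloydb_v1beta.CreateInstanceRequest(
                    parent=parent,
                    instance_id="primary-instance",
                    instance=alloydb_v1beta.Instance(
                        instance_type=alloydb_v1beta.Instance.InstanceType.PRIMARY,
                    ),
                ),
            ],
        ),
        request_id=str(uuid.uuid4()),  # any valid UUID except the zero UUID
    )
    operation = client.batch_create_instances(request=request)
    # On timeout, resend the SAME request (and request_id) within 60
    # minutes; the server ignores the duplicate if the first attempt
    # already completed.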
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: "CreateInstanceRequests" = proto.Field( + proto.MESSAGE, + number=2, + message="CreateInstanceRequests", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BatchCreateInstancesResponse(proto.Message): + r"""Message for creating batches of instances in a cluster. + + Attributes: + instances (MutableSequence[google.cloud.alloydb_v1beta.types.Instance]): + Created instances. + """ + + instances: MutableSequence[resources.Instance] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Instance, + ) + + +class BatchCreateInstancesMetadata(proto.Message): + r"""Message for metadata that is specific to BatchCreateInstances + API. + + Attributes: + instance_targets (MutableSequence[str]): + The instances being created in the API call. + Each string in this list is the server defined + resource path for target instances in the + request and for the format of each string, see + the comment on the Instance.name field. + instance_statuses (MutableMapping[str, google.cloud.alloydb_v1beta.types.BatchCreateInstanceStatus]): + A map representing state of the instances involved in the + BatchCreateInstances operation during the operation + execution. The instance state will be in STATE_UNSPECIFIED + state if the instance has not yet been picked up for + processing. The key of the map is the name of the instance + resource. For the format, see the comment on the + Instance.name field. + """ + + instance_targets: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + instance_statuses: MutableMapping[ + str, "BatchCreateInstanceStatus" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="BatchCreateInstanceStatus", + ) + + +class BatchCreateInstanceStatus(proto.Message): + r"""Message for current status of an instance in the + BatchCreateInstances operation. For example, lets say a + BatchCreateInstances workflow has 4 instances, Instance1 through + Instance4. Lets also assume that 2 instances succeeded but the third + failed to create and the 4th was never picked up for creation + because of failure of the previous one. Then, resulting states would + look something like: + + 1. Instance1 = ROLLED_BACK + 2. Instance2 = ROLLED_BACK + 3. Instance3 = FAILED + 4. Instance4 = FAILED However, while the operation is running, the + instance might be in other states including PENDING_CREATE, + ACTIVE, DELETING and CREATING. The states / do not get further + updated once the operation is done. + + Attributes: + state (google.cloud.alloydb_v1beta.types.BatchCreateInstanceStatus.State): + The current state of an instance involved in the batch + create operation. Once the operation is complete, the final + state of the instances in the LRO can be one of: + + 1. ACTIVE, indicating that instances were created + successfully + 2. FAILED, indicating that a particular instance failed + creation + 3. ROLLED_BACK indicating that although the instance was + created successfully, it had to be rolled back and + deleted due to failure in other steps of the workflow. + error_msg (str): + DEPRECATED - Use the error field instead. + Error, if any error occurred and is available, + during instance creation. + error (google.rpc.status_pb2.Status): + The RPC status of the instance creation + operation. This field will be present if an + error happened during the instance creation. 
+ type_ (google.cloud.alloydb_v1beta.types.Instance.InstanceType): + + """ + + class State(proto.Enum): + r"""State contains all valid instance states for the + BatchCreateInstances operation. This is mainly used for status + reporting through the LRO metadata. + + Values: + STATE_UNSPECIFIED (0): + The state of the instance is unknown. + PENDING_CREATE (1): + Instance is pending creation and has not yet + been picked up for processing in the backend. + READY (2): + The instance is active and running. + CREATING (3): + The instance is being created. + DELETING (4): + The instance is being deleted. + FAILED (5): + The creation of the instance failed or a + fatal error occurred during an operation on the + instance or a batch of instances. + ROLLED_BACK (6): + The instance was created successfully, but + was rolled back and deleted due to some other + failure during the BatchCreateInstances operation. + """ + STATE_UNSPECIFIED = 0 + PENDING_CREATE = 1 + READY = 2 + CREATING = 3 + DELETING = 4 + FAILED = 5 + ROLLED_BACK = 6 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + error_msg: str = proto.Field( + proto.STRING, + number=2, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + type_: resources.Instance.InstanceType = proto.Field( + proto.ENUM, + number=3, + enum=resources.Instance.InstanceType, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""Message for updating an Instance + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Instance resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + instance (google.cloud.alloydb_v1beta.types.Instance): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + update request. + allow_missing (bool): + Optional. If set to true, update succeeds even if instance + is not found. In that case, a new instance is created and + ``update_mask`` is ignored.
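Only the paths named in update_mask are overwritten; omitting the mask overwrites everything. A minimal sketch of a label-only update, assuming the generated AlloyDBAdminClient; the instance name is hypothetical:

    from google.cloud import alloydb_v1beta
    from google.protobuf import field_mask_pb2

    client = alloydb_v1beta.AlloyDBAdminClient()
    instance = alloydb_v1beta.Instance(
        name="projects/my-project/locations/us-central1/clusters/my-cluster/instances/primary-instance",
        labels={"tier": "gold"},
    )
    operation = client.update_instance(
        request=alloydb_v1beta.UpdateInstanceRequest(
            instance=instance,
            update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
        )
    )
    updated = operation.result()  # blocks until the LRO completes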
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance: resources.Instance = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Instance, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Message for deleting a Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + etag (str): + Optional. The current etag of the Instance. + If an etag is provided and does not match the + current etag of the Instance, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class FailoverInstanceRequest(proto.Message): + r"""Message for triggering failover on an Instance + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + failover. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class RestartInstanceRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Instance.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request validation + (e.g. permission checks and any other type of + validation), but do not actually execute the + restart. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListBackupsRequest(proto.Message): + r"""Message for requesting list of Backups + + Attributes: + parent (str): + Required. Parent value for ListBackupsRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupsResponse(proto.Message): + r"""Message for response to listing Backups + + Attributes: + backups (MutableSequence[google.cloud.alloydb_v1beta.types.Backup]): + The list of Backup + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence[resources.Backup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Backup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Message for getting a Backup + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateBackupRequest(proto.Message): + r"""Message for creating a Backup + + Attributes: + parent (str): + Required. Value for parent. + backup_id (str): + Required. ID of the requesting object. 
+ backup (google.cloud.alloydb_v1beta.types.Backup): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, the backend validates the + request, but doesn't actually execute it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup: resources.Backup = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Backup, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class UpdateBackupRequest(proto.Message): + r"""Message for updating a Backup + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. If the user does not provide a mask then all + fields will be overwritten. + backup (google.cloud.alloydb_v1beta.types.Backup): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, the backend validates the + request, but doesn't actually execute it. + allow_missing (bool): + Optional. If set to true, update succeeds even if the backup + is not found. In that case, a new backup is created and + ``update_mask`` is ignored.
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: resources.Backup = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Backup, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Message for deleting a Backup + + Attributes: + name (str): + Required. Name of the resource. For the + required format, see the comment on the + Backup.name field. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, the backend validates the + request, but doesn't actually execute it. + etag (str): + Optional. The current etag of the Backup. + If an etag is provided and does not match the + current etag of the Backup, deletion will be + blocked and an ABORTED error will be returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSupportedDatabaseFlagsRequest(proto.Message): + r"""Message for listing the information about the supported + Database flags. + + Attributes: + parent (str): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location} + + Regardless of the parent specified here, as long it is + contains a valid project and location, the service will + return a static list of supported flags resources. Note that + we do not yet support region-specific flags. + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSupportedDatabaseFlagsResponse(proto.Message): + r"""Message for response to listing SupportedDatabaseFlags. + + Attributes: + supported_database_flags (MutableSequence[google.cloud.alloydb_v1beta.types.SupportedDatabaseFlag]): + The list of SupportedDatabaseFlags. + next_page_token (str): + A token identifying a page of results the + server should return. 
+ """ + + @property + def raw_page(self): + return self + + supported_database_flags: MutableSequence[ + resources.SupportedDatabaseFlag + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.SupportedDatabaseFlag, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GenerateClientCertificateRequest(proto.Message): + r"""Message for requests to generate a client certificate signed + by the Cluster CA. + + Attributes: + parent (str): + Required. The name of the parent resource. The required + format is: + + - projects/{project}/locations/{location}/clusters/{cluster} + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + pem_csr (str): + Optional. A pem-encoded X.509 certificate + signing request (CSR). + cert_duration (google.protobuf.duration_pb2.Duration): + Optional. An optional hint to the endpoint to + generate the client certificate with the + requested duration. The duration can be from 1 + hour to 24 hours. The endpoint may or may not + honor the hint. If the hint is left unspecified + or is not honored, then the endpoint will pick + an appropriate default duration. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + pem_csr: str = proto.Field( + proto.STRING, + number=3, + ) + cert_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + +class GenerateClientCertificateResponse(proto.Message): + r"""Message returned by a GenerateClientCertificate operation. + + Attributes: + pem_certificate (str): + Output only. The pem-encoded, signed X.509 + certificate. + pem_certificate_chain (MutableSequence[str]): + Output only. The pem-encoded chain that may + be used to verify the X.509 certificate. + Expected to be in issuer-to-root order according + to RFC 5246. + """ + + pem_certificate: str = proto.Field( + proto.STRING, + number=1, + ) + pem_certificate_chain: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class GetConnectionInfoRequest(proto.Message): + r"""Request message for GetConnectionInfo. + + Attributes: + parent (str): + Required. The name of the parent resource. + The required format is: + projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + batch_create_instances_metadata (google.cloud.alloydb_v1beta.types.BatchCreateInstancesMetadata): + Output only. BatchCreateInstances related + metadata. + + This field is a member of `oneof`_ ``request_specific``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + batch_create_instances_metadata: "BatchCreateInstancesMetadata" = proto.Field( + proto.MESSAGE, + number=8, + oneof="request_specific", + message="BatchCreateInstancesMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/mypy.ini b/packages/google-cloud-alloydb/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-alloydb/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-alloydb/noxfile.py b/packages/google-cloud-alloydb/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-alloydb/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-cloud-alloydb/renovate.json b/packages/google-cloud-alloydb/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-alloydb/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_batch_create_instances_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_batch_create_instances_async.py
new file mode 100644
index 000000000000..28bc0abf1aa2
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_batch_create_instances_async.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchCreateInstances
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1_generated_AlloyDBAdmin_BatchCreateInstances_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
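+# - create_instance_requests is a repeated field, so the attribute-style
+#   assignments below are placeholders; you will likely need to construct
+#   each CreateInstanceRequest separately and append it to
+#   requests.create_instance_requests.
+# - In this async variant, AsyncOperation.result is itself a coroutine, so
+#   adapting the template may also require awaiting the result() call.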
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_batch_create_instances(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + requests = alloydb_v1.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_BatchCreateInstances_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_batch_create_instances_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_batch_create_instances_sync.py new file mode 100644 index 000000000000..05ad2b8da55b --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_batch_create_instances_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_BatchCreateInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
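+# - create_instance_requests is a repeated field, so the attribute-style
+#   assignments below are placeholders; constructing each CreateInstanceRequest
+#   separately and appending it to requests.create_instance_requests may be
+#   required.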
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_batch_create_instances(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + requests = alloydb_v1.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_BatchCreateInstances_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_backup_async.py new file mode 100644 index 000000000000..f8698a0731cf --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_backup_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_CreateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_create_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_CreateBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_backup_sync.py new file mode 100644 index 000000000000..21fbb9ebae6f --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_backup_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_CreateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
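+# - operation.result() blocks until the long-running backup operation
+#   completes; it accepts an optional timeout (in seconds) if you do not
+#   want to wait indefinitely.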
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_create_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_CreateBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_cluster_async.py new file mode 100644 index 000000000000..a3fed7e95e47 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_cluster_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_CreateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_create_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_CreateCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_cluster_sync.py new file mode 100644 index 000000000000..a5cae37e0879 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_cluster_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_CreateCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_create_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_CreateCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_instance_async.py new file mode 100644 index 000000000000..66b357e64358 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
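+# - Enum fields such as instance_type accept either the enum value
+#   (alloydb_v1.Instance.InstanceType.SECONDARY) or, as below, its name as
+#   a string.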
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_create_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_CreateInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_instance_sync.py new file mode 100644 index 000000000000..dd08bcf36766 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_create_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_create_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_CreateInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_backup_async.py new file mode 100644 index 000000000000..0a24e9317f04 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
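+# - The result of this delete operation is google.protobuf.Empty, so the
+#   response printed below is expected to be empty.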
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_delete_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_DeleteBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_backup_sync.py new file mode 100644 index 000000000000..b8f2ba76bb0e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_backup_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_delete_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_DeleteBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_cluster_async.py new file mode 100644 index 000000000000..1aa9cf335d8f --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_DeleteCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_delete_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_DeleteCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_cluster_sync.py new file mode 100644 index 000000000000..469236227932 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_DeleteCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_delete_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_DeleteCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_instance_async.py new file mode 100644 index 000000000000..3198130edc37 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_delete_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_DeleteInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_instance_sync.py new file mode 100644 index 000000000000..fe81e5dea19e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_delete_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_delete_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_DeleteInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_failover_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_failover_instance_async.py new file mode 100644 index 000000000000..cd80ce9e6d9b --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_failover_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FailoverInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_FailoverInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_failover_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_FailoverInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_failover_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_failover_instance_sync.py new file mode 100644 index 000000000000..9d3130e2d8cd --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_failover_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FailoverInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_FailoverInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_failover_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_FailoverInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_backup_async.py new file mode 100644 index 000000000000..16ed3c8e2ff5 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_get_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_GetBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_backup_sync.py new file mode 100644 index 000000000000..5b8eeb8c1cc1 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_get_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_GetBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_cluster_async.py new file mode 100644 index 000000000000..3d4df7f16d9b --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_cluster_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_GetCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_get_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_GetCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_cluster_sync.py new file mode 100644 index 000000000000..eb296f332c1d --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_GetCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_get_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_GetCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_instance_async.py new file mode 100644 index 000000000000..c23cb76d27a9 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_get_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_GetInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_instance_sync.py new file mode 100644 index 000000000000..e90c57228ff0 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_get_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_GetInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_backups_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_backups_async.py new file mode 100644 index 000000000000..5289464e16b5 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
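+# - With the async client, the list call itself is a coroutine, so the
+#   pager likely needs to be awaited before iterating; a sketch under
+#   that assumption:
+#
+#       page_result = await client.list_backups(request=request)
+#       async for response in page_result:
+#           print(response)
+#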
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_list_backups(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListBackups_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_backups_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_backups_sync.py new file mode 100644 index 000000000000..a0a461520501 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
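+# - The parent value is a placeholder; a sketch of building a real parent
+#   path with the generated helper (the project and location shown are
+#   assumptions):
+#
+#       parent = alloydb_v1.AlloyDBAdminClient.common_location_path(
+#           "my-project", "us-central1",
+#       )
+#       request = alloydb_v1.ListBackupsRequest(parent=parent)
+#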
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_list_backups(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListBackups_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_clusters_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_clusters_async.py new file mode 100644 index 000000000000..16de745ce127 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_clusters_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListClusters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
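+# - The coroutine below needs an event loop to run; a minimal driver
+#   sketch, assuming the sample function is in scope:
+#
+#       import asyncio
+#
+#       asyncio.run(sample_list_clusters())
+#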
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_list_clusters(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListClusters_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_clusters_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_clusters_sync.py new file mode 100644 index 000000000000..88b2668d5c0f --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_clusters_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListClusters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_list_clusters(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListClusters_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_instances_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_instances_async.py new file mode 100644 index 000000000000..bb76082787b6 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_instances_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
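+# - Responses are paginated; a sketch of requesting smaller pages via the
+#   standard page_size field (assuming the usual AIP-158 list request):
+#
+#       request = alloydb_v1.ListInstancesRequest(
+#           parent="parent_value",
+#           page_size=50,
+#       )
+#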
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_list_instances(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListInstances_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_instances_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_instances_sync.py new file mode 100644 index 000000000000..1ba284b4a984 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_instances_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_list_instances(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListInstances_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_async.py new file mode 100644 index 000000000000..eed8bc7c549b --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSupportedDatabaseFlags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_list_supported_database_flags(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.ListSupportedDatabaseFlagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_supported_database_flags(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_sync.py new file mode 100644 index 000000000000..675f2f3c7475 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSupportedDatabaseFlags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
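+# - Generated methods accept per-call keyword arguments; a sketch of
+#   bounding the call with a timeout (the value is arbitrary):
+#
+#       page_result = client.list_supported_database_flags(
+#           request=request, timeout=30.0,
+#       )
+#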
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_list_supported_database_flags(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.ListSupportedDatabaseFlagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_supported_database_flags(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restart_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restart_instance_async.py new file mode 100644 index 000000000000..c2ac0c47ada6 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restart_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_RestartInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_restart_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_RestartInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restart_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restart_instance_sync.py new file mode 100644 index 000000000000..ce614b2903e3 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restart_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_RestartInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
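+# - The long-running operation can be bounded and inspected while it
+#   runs; a sketch (the timeout value is arbitrary):
+#
+#       print(operation.metadata)          # operation progress metadata
+#       response = operation.result(timeout=300)
+#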
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_restart_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_RestartInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restore_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restore_cluster_async.py new file mode 100644 index 000000000000..0a0ff18266a9 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restore_cluster_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_RestoreCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_restore_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup_source = alloydb_v1.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_RestoreCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restore_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restore_cluster_sync.py new file mode 100644 index 000000000000..6be42381180c --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_restore_cluster_sync.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_RestoreCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
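+# - Failures surface as google.api_core exceptions; a sketch of handling
+#   them around the blocking wait:
+#
+#       from google.api_core import exceptions
+#
+#       try:
+#           response = operation.result()
+#       except exceptions.GoogleAPICallError as exc:
+#           print(f"Restore failed: {exc}")
+#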
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_restore_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + backup_source = alloydb_v1.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_RestoreCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_backup_async.py new file mode 100644 index 000000000000..9c1a807ebc3c --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_backup_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_update_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_UpdateBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_backup_sync.py new file mode 100644 index 000000000000..fd254ca1fa45 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_backup_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_UpdateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
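+# - Partial updates are expressed with a field mask; a sketch that would
+#   update only the labels field (assuming Backup.labels and the standard
+#   update_mask request field):
+#
+#       from google.protobuf import field_mask_pb2
+#
+#       request = alloydb_v1.UpdateBackupRequest(
+#           backup=backup,
+#           update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+#       )
+#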
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_update_backup(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_UpdateBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_cluster_async.py new file mode 100644 index 000000000000..331e3ce94c32 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_cluster_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_UpdateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_update_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_UpdateCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_cluster_sync.py new file mode 100644 index 000000000000..c006114787a0 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_cluster_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_UpdateCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_update_cluster(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_UpdateCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_instance_async.py new file mode 100644 index 000000000000..16828754e3fb --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_instance_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +async def sample_update_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_UpdateInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_instance_sync.py new file mode 100644 index 000000000000..51b952a16d44 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_update_instance_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1_generated_AlloyDBAdmin_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
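+# - The string "SECONDARY" stands in for an enum value; if the generated
+#   enum class is preferred, a sketch (assuming the usual proto-plus enum
+#   layout):
+#
+#       instance.instance_type = alloydb_v1.Instance.InstanceType.SECONDARY
+#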
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1 + + +def sample_update_instance(): + # Create a client + client = alloydb_v1.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1_generated_AlloyDBAdmin_UpdateInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_async.py new file mode 100644 index 000000000000..391670e6065d --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_BatchCreateInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
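+# - create_instance_requests is a repeated field, so the attribute
+#   assignments on it below may not run as-is under proto-plus; a sketch
+#   that builds the list explicitly instead:
+#
+#       child = alloydb_v1alpha.CreateInstanceRequest(
+#           parent="parent_value",
+#           instance_id="instance_id_value",
+#           instance=alloydb_v1alpha.Instance(instance_type="SECONDARY"),
+#       )
+#       requests = alloydb_v1alpha.CreateInstanceRequests(
+#           create_instance_requests=[child],
+#       )
+#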
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_batch_create_instances(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + requests = alloydb_v1alpha.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_BatchCreateInstances_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_sync.py new file mode 100644 index 000000000000..4a8b83e9db44 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_BatchCreateInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_batch_create_instances(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + requests = alloydb_v1alpha.CreateInstanceRequests() + requests.create_instance_requests.parent = "parent_value" + requests.create_instance_requests.instance_id = "instance_id_value" + requests.create_instance_requests.instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.BatchCreateInstancesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_instances(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_BatchCreateInstances_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_backup_async.py new file mode 100644 index 000000000000..f18a18c98786 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_backup_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_create_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + backup = alloydb_v1alpha.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1alpha.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_backup_sync.py new file mode 100644 index 000000000000..257f3fa2887b --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_backup_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_create_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1alpha.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1alpha.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_cluster_async.py new file mode 100644 index 000000000000..d0e9d9672575 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_cluster_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_create_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_cluster_sync.py new file mode 100644 index 000000000000..b13352b60270 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_cluster_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
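The header comments in every snippet point to `client_options` for regional endpoints without showing it. A minimal sketch, where the regional hostname is a hypothetical stand-in (consult the service's documentation for the endpoints it actually serves):

    from google.api_core.client_options import ClientOptions
    from google.cloud import alloydb_v1alpha

    # "us-central1-alloydb.googleapis.com" is a hypothetical regional
    # hostname; substitute whatever endpoint the service documents.
    options = ClientOptions(api_endpoint="us-central1-alloydb.googleapis.com")
    client = alloydb_v1alpha.AlloyDBAdminClient(client_options=options)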
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_create_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_instance_async.py new file mode 100644 index 000000000000..fb10682a0761 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
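Placeholders such as "parent_value" stand in for full resource names of the form projects/{project}/locations/{location}[/clusters/{cluster}]. Generated clients conventionally expose classmethod path helpers for the resources their protos define; assuming the AlloyDB client follows that convention, the names can be built instead of hand-formatted:

    from google.cloud import alloydb_v1alpha

    # Path helpers are classmethods, so no client instance is required.
    parent = alloydb_v1alpha.AlloyDBAdminClient.common_location_path(
        "my-project", "us-central1"
    )
    cluster = alloydb_v1alpha.AlloyDBAdminClient.cluster_path(
        "my-project", "us-central1", "my-cluster"
    )

    print(parent)   # projects/my-project/locations/us-central1
    print(cluster)  # .../locations/us-central1/clusters/my-cluster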
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_create_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_instance_sync.py new file mode 100644 index 000000000000..212d978d5025 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_create_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_async.py new file mode 100644 index 000000000000..3186dbbc6ebd --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecondaryCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
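The assignment `instance.instance_type = "SECONDARY"` in the instance samples works because proto-plus coerces a string onto the matching enum member; assigning the enum directly is equivalent and friendlier to type checkers. A small sketch:

    from google.cloud import alloydb_v1alpha

    instance = alloydb_v1alpha.Instance()

    # proto-plus enum fields accept the enum member, its name, or its
    # integer value; these two assignments are equivalent.
    instance.instance_type = alloydb_v1alpha.Instance.InstanceType.SECONDARY
    instance.instance_type = "SECONDARY"

    print(instance.instance_type)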
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_create_secondary_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_secondary_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_sync.py new file mode 100644 index 000000000000..a11ae4314c71 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecondaryCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_create_secondary_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_secondary_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_async.py new file mode 100644 index 000000000000..e79fbf2afb93 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecondaryInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_create_secondary_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.CreateSecondaryInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_secondary_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_sync.py new file mode 100644 index 000000000000..ca6cf0956096 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecondaryInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_create_secondary_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.CreateSecondaryInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_secondary_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_backup_async.py new file mode 100644 index 000000000000..a64ffee8a7ff --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_delete_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_DeleteBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_backup_sync.py new file mode 100644 index 000000000000..969149d78b47 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_backup_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_delete_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_DeleteBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_async.py new file mode 100644 index 000000000000..78e6cd0dd1c1 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_DeleteCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
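Unlike the create samples, the delete operations here presumably resolve to `google.protobuf.Empty` (an assumption based on the usual `operation_info` for delete RPCs), so the final `print(response)` shows an empty message. Success is simply `result()` returning without raising:

    from google.cloud import alloydb_v1alpha

    client = alloydb_v1alpha.AlloyDBAdminClient()

    operation = client.delete_backup(
        request=alloydb_v1alpha.DeleteBackupRequest(name="name_value")
    )

    # On success result() returns the (empty) response message and raises
    # otherwise, so reaching the next line means the backup is gone.
    operation.result()
    print("Backup deleted.")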
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_delete_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_DeleteCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_sync.py new file mode 100644 index 000000000000..87f19ebe62fd --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_DeleteCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_delete_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_DeleteCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_instance_async.py new file mode 100644 index 000000000000..b5d04b402399 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_delete_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_DeleteInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_instance_sync.py new file mode 100644 index 000000000000..960fbceca3a8 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_delete_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_delete_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_DeleteInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_failover_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_failover_instance_async.py new file mode 100644 index 000000000000..011baae1e0b2 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_failover_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FailoverInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_FailoverInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_failover_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_FailoverInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_failover_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_failover_instance_sync.py new file mode 100644 index 000000000000..76f33f85a53a --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_failover_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FailoverInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_FailoverInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_failover_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_FailoverInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_async.py new file mode 100644 index 000000000000..d1aecab5164d --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateClientCertificate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GenerateClientCertificate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
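Blocking in `result()` is not the only option for operations like the failover above: the returned `Operation` can be polled manually, with `done()` refreshing state from the API on each call and `metadata` exposing the service's `OperationMetadata`. A rough sketch of such a loop:

    import time

    from google.cloud import alloydb_v1alpha

    client = alloydb_v1alpha.AlloyDBAdminClient()

    operation = client.failover_instance(
        request=alloydb_v1alpha.FailoverInstanceRequest(name="name_value")
    )

    # done() refreshes the operation's state from the API on each call;
    # metadata carries the service's progress information, if any.
    while not operation.done():
        print(f"Still running: {operation.metadata}")
        time.sleep(10)

    print(operation.result())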
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.generate_client_certificate(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GenerateClientCertificate_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_sync.py new file mode 100644 index 000000000000..188a7c69283e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateClientCertificate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GenerateClientCertificate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
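`generate_client_certificate` is a plain unary call, with no operation to poll, which is where the async client pays off: independent requests can be awaited concurrently. A minimal sketch with two hypothetical parent paths:

    import asyncio

    from google.cloud import alloydb_v1alpha


    async def fetch_certificates() -> None:
        client = alloydb_v1alpha.AlloyDBAdminAsyncClient()

        parents = ["parent_value_1", "parent_value_2"]  # hypothetical paths

        # Issue both RPCs concurrently and wait for every response.
        responses = await asyncio.gather(
            *(
                client.generate_client_certificate(
                    request=alloydb_v1alpha.GenerateClientCertificateRequest(
                        parent=parent
                    )
                )
                for parent in parents
            )
        )
        for response in responses:
            print(response)


    asyncio.run(fetch_certificates())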
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = client.generate_client_certificate(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GenerateClientCertificate_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_backup_async.py new file mode 100644 index 000000000000..e81a12aeb6a5 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
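Every sample here constructs its client with no arguments, so Application Default Credentials are picked up from the environment. To pin credentials explicitly, something like the following should work; the key-file path is a hypothetical placeholder:

    from google.oauth2 import service_account

    from google.cloud import alloydb_v1alpha

    # Hypothetical path to a service-account key file.
    credentials = service_account.Credentials.from_service_account_file(
        "/path/to/service-account.json"
    )
    client = alloydb_v1alpha.AlloyDBAdminClient(credentials=credentials)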
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_get_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_backup_sync.py new file mode 100644 index 000000000000..5e33495b9f16 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_get_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_cluster_async.py new file mode 100644 index 000000000000..8ddb1eead393 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_cluster_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
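Point lookups such as `get_backup` raise `google.api_core.exceptions.NotFound` when the name does not resolve, so catching that exception is the idiomatic existence check:

    from google.api_core import exceptions
    from google.cloud import alloydb_v1alpha

    client = alloydb_v1alpha.AlloyDBAdminClient()

    try:
        backup = client.get_backup(
            request=alloydb_v1alpha.GetBackupRequest(name="name_value")
        )
        print(backup)
    except exceptions.NotFound:
        print("No backup with that name.")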
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_get_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_cluster_sync.py new file mode 100644 index 000000000000..ae2b58640261 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_get_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_async.py new file mode 100644 index 000000000000..8cd4809b4484 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetConnectionInfo_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_get_connection_info(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_connection_info(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetConnectionInfo_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_sync.py new file mode 100644 index 000000000000..0176d50a65d6 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetConnectionInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_get_connection_info(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_connection_info(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetConnectionInfo_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_instance_async.py new file mode 100644 index 000000000000..958ca3156cb8 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
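+#   For example, the "name_value" placeholder below stands for a full
+#   resource path; one possible way to build it, assuming the generated
+#   instance_path helper (IDs are hypothetical):
+#
+#       name = alloydb_v1alpha.AlloyDBAdminAsyncClient.instance_path(
+#           "my-project", "us-central1", "my-cluster", "my-instance",
+#       )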
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_get_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_instance_sync.py new file mode 100644 index 000000000000..7cc0cbef22da --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_get_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_GetInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_backups_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_backups_async.py new file mode 100644 index 000000000000..1ae050c61f02 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
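+#   A possible variation (sketch): after awaiting the call below, the
+#   returned pager also exposes whole pages rather than flattened items:
+#
+#       page_result = await client.list_backups(request=request)
+#       async for page in page_result.pages:
+#           print(len(page.backups))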
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_list_backups():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1alpha.ListBackupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_backups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListBackups_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_backups_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_backups_sync.py
new file mode 100644
index 000000000000..9e25e3a84801
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_backups_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListBackups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
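+#   For example, the standard page_size field can bound each page of
+#   results (sketch):
+#
+#       request = alloydb_v1alpha.ListBackupsRequest(
+#           parent="parent_value",
+#           page_size=5,
+#       )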
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_list_backups(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListBackups_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_clusters_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_clusters_async.py new file mode 100644 index 000000000000..0dbb9b501631 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_clusters_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListClusters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_list_clusters():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1alpha.ListClustersRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_clusters(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListClusters_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_clusters_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_clusters_sync.py
new file mode 100644
index 000000000000..0012d88be0b4
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_clusters_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListClusters
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListClusters_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_list_clusters(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListClusters_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_instances_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_instances_async.py new file mode 100644 index 000000000000..648dbea26272 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_instances_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
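+#   For example, list requests typically also accept filter and order_by;
+#   a sketch (the filter expression shown is assumed, not verified):
+#
+#       request = alloydb_v1alpha.ListInstancesRequest(
+#           parent="parent_value",
+#           filter='instance_type = "READ_POOL"',
+#           order_by="name",
+#       )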
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_list_instances():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1alpha.ListInstancesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_instances(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListInstances_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_instances_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_instances_sync.py
new file mode 100644
index 000000000000..b789260d624f
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_instances_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstances
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListInstances_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_list_instances(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListInstances_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_async.py new file mode 100644 index 000000000000..dabaa92d56fe --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSupportedDatabaseFlags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_list_supported_database_flags():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1alpha.ListSupportedDatabaseFlagsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_supported_database_flags(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_sync.py
new file mode 100644
index 000000000000..7c3c97653175
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSupportedDatabaseFlags
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_list_supported_database_flags(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListSupportedDatabaseFlagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_supported_database_flags(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_async.py new file mode 100644 index 000000000000..e660ba135c19 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PromoteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_PromoteCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
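+#   Note on the async long-running pattern used below; unrolled, the same
+#   steps are (sketch):
+#
+#       operation = client.promote_cluster(request=request)  # coroutine
+#       lro = await operation          # google.api_core AsyncOperation
+#       response = await lro.result()  # resolved Cluster once the LRO is done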
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_promote_cluster():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1alpha.PromoteClusterRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.promote_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_PromoteCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_sync.py
new file mode 100644
index 000000000000..2444c8b3b424
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for PromoteCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_PromoteCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
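+#   For example, the blocking wait below accepts a timeout in seconds
+#   (sketch):
+#
+#       response = operation.result(timeout=300)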
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_promote_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.PromoteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.promote_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_PromoteCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restart_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restart_instance_async.py new file mode 100644 index 000000000000..880b5a6ec77e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restart_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_RestartInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_restart_instance():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1alpha.RestartInstanceRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.restart_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_RestartInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restart_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restart_instance_sync.py
new file mode 100644
index 000000000000..cf22a3831a20
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restart_instance_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RestartInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_RestartInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_restart_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_RestartInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_async.py new file mode 100644 index 000000000000..6453c6fecee0 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_RestoreCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
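+#   For example, "backup_name_value" below stands for a full backup resource
+#   path (the IDs here are hypothetical):
+#
+#       backup_source.backup_name = (
+#           "projects/my-project/locations/us-central1/backups/my-backup"
+#       )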
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_restore_cluster():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    backup_source = alloydb_v1alpha.BackupSource()
+    backup_source.backup_name = "backup_name_value"
+
+    cluster = alloydb_v1alpha.Cluster()
+    cluster.backup_source.backup_name = "backup_name_value"
+    cluster.network = "network_value"
+
+    request = alloydb_v1alpha.RestoreClusterRequest(
+        backup_source=backup_source,
+        parent="parent_value",
+        cluster_id="cluster_id_value",
+        cluster=cluster,
+    )
+
+    # Make the request
+    operation = client.restore_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_RestoreCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_sync.py
new file mode 100644
index 000000000000..5c1650eab243
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_sync.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RestoreCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_RestoreCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_restore_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + backup_source = alloydb_v1alpha.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_RestoreCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_backup_async.py new file mode 100644 index 000000000000..0ca590c43ef0 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_backup_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
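+#   For example, an update_mask can restrict which backup fields are written
+#   (sketch; the field name shown is illustrative):
+#
+#       from google.protobuf import field_mask_pb2
+#       request = alloydb_v1alpha.UpdateBackupRequest(
+#           backup=backup,
+#           update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+#       )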
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_update_backup():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    backup = alloydb_v1alpha.Backup()
+    backup.cluster_name = "cluster_name_value"
+
+    request = alloydb_v1alpha.UpdateBackupRequest(
+        backup=backup,
+    )
+
+    # Make the request
+    operation = client.update_backup(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpdateBackup_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_backup_sync.py
new file mode 100644
index 000000000000..4622e8bef4a6
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_backup_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateBackup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpdateBackup_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_update_backup(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1alpha.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1alpha.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpdateBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_cluster_async.py new file mode 100644 index 000000000000..0e92c5c95168 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_cluster_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpdateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_update_cluster():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    cluster = alloydb_v1alpha.Cluster()
+    cluster.backup_source.backup_name = "backup_name_value"
+    cluster.network = "network_value"
+
+    request = alloydb_v1alpha.UpdateClusterRequest(
+        cluster=cluster,
+    )
+
+    # Make the request
+    operation = client.update_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpdateCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_cluster_sync.py
new file mode 100644
index 000000000000..4c71dbf35dac
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_cluster_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpdateCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_update_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1alpha.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1alpha.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpdateCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_instance_async.py new file mode 100644 index 000000000000..7787140c4950 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_instance_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1alpha
+
+
+async def sample_update_instance():
+    # Create a client
+    client = alloydb_v1alpha.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = alloydb_v1alpha.Instance()
+    instance.instance_type = "SECONDARY"
+
+    request = alloydb_v1alpha.UpdateInstanceRequest(
+        instance=instance,
+    )
+
+    # Make the request
+    operation = client.update_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpdateInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_instance_sync.py
new file mode 100644
index 000000000000..fa6b6477e6f7
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_update_instance_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpdateInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_update_instance(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1alpha.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1alpha.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpdateInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_async.py new file mode 100644 index 000000000000..35c6a964fc0a --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_BatchCreateInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
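+#   Note: create_instance_requests is a repeated field, so the sample below
+#   appends a fully built CreateInstanceRequest; an equivalent construction
+#   (sketch):
+#
+#       requests = alloydb_v1beta.CreateInstanceRequests(
+#           create_instance_requests=[create_instance_request],
+#       )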
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_batch_create_instances():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    # create_instance_requests is a repeated field, so build a single
+    # CreateInstanceRequest and append it.
+    create_instance_request = alloydb_v1beta.CreateInstanceRequest()
+    create_instance_request.parent = "parent_value"
+    create_instance_request.instance_id = "instance_id_value"
+    create_instance_request.instance.instance_type = "SECONDARY"
+
+    requests = alloydb_v1beta.CreateInstanceRequests()
+    requests.create_instance_requests.append(create_instance_request)
+
+    request = alloydb_v1beta.BatchCreateInstancesRequest(
+        parent="parent_value",
+        requests=requests,
+    )
+
+    # Make the request
+    operation = client.batch_create_instances(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_BatchCreateInstances_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_sync.py
new file mode 100644
index 000000000000..773b0fe0e2ed
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchCreateInstances
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_BatchCreateInstances_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+def sample_batch_create_instances():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminClient()
+
+    # Initialize request argument(s)
+    # create_instance_requests is a repeated field, so build each entry
+    # separately and append it.
+    create_instance_request = alloydb_v1beta.CreateInstanceRequest()
+    create_instance_request.parent = "parent_value"
+    create_instance_request.instance_id = "instance_id_value"
+    create_instance_request.instance.instance_type = "SECONDARY"
+
+    requests = alloydb_v1beta.CreateInstanceRequests()
+    requests.create_instance_requests.append(create_instance_request)
+
+    request = alloydb_v1beta.BatchCreateInstancesRequest(
+        parent="parent_value",
+        requests=requests,
+    )
+
+    # Make the request
+    operation = client.batch_create_instances(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_BatchCreateInstances_sync]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_backup_async.py
new file mode 100644
index 000000000000..82ee8ab98ed3
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_backup_async.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateBackup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateBackup_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
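+#   (For instance, the "cluster_name_value" placeholder below stands in for a
+#   full resource path; the exact format is an assumption:
+#       backup.cluster_name = "projects/my-project/locations/us-central1/clusters/my-cluster"
+#   )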
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_create_backup():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    backup = alloydb_v1beta.Backup()
+    backup.cluster_name = "cluster_name_value"
+
+    request = alloydb_v1beta.CreateBackupRequest(
+        parent="parent_value",
+        backup_id="backup_id_value",
+        backup=backup,
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.create_backup(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateBackup_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_backup_sync.py
new file mode 100644
index 000000000000..063a9bfc30c5
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_backup_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateBackup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateBackup_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_create_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1beta.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1beta.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + backup=backup, + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_cluster_async.py new file mode 100644 index 000000000000..6193bc9b66f5 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_cluster_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
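+#   (Placeholders such as "parent_value" can be built with the client's
+#   generated path helpers; a sketch:
+#       parent = alloydb_v1beta.AlloyDBAdminClient.common_location_path(
+#           "my-project", "us-central1"
+#       )
+#   )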
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_create_cluster():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    cluster = alloydb_v1beta.Cluster()
+    cluster.backup_source.backup_name = "backup_name_value"
+    cluster.network = "network_value"
+
+    request = alloydb_v1beta.CreateClusterRequest(
+        parent="parent_value",
+        cluster_id="cluster_id_value",
+        cluster=cluster,
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.create_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_cluster_sync.py
new file mode 100644
index 000000000000..38ed47ccd9ca
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_cluster_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_create_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_instance_async.py new file mode 100644 index 000000000000..4a175fa29b12 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
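+#   (GAPIC clients also accept plain dicts in place of request objects; an
+#   equivalent sketch for the call below:
+#       operation = await client.create_instance(request={
+#           "parent": "parent_value",
+#           "instance_id": "instance_id_value",
+#           "instance": {"instance_type": "SECONDARY"},
+#       })
+#   )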
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_create_instance():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = alloydb_v1beta.Instance()
+    instance.instance_type = "SECONDARY"
+
+    request = alloydb_v1beta.CreateInstanceRequest(
+        parent="parent_value",
+        instance_id="instance_id_value",
+        instance=instance,
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.create_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_instance_sync.py
new file mode 100644
index 000000000000..114a4c087a1d
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_instance_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_create_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_async.py new file mode 100644 index 000000000000..ae17405392a0 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecondaryCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_create_secondary_cluster():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    cluster = alloydb_v1beta.Cluster()
+    cluster.backup_source.backup_name = "backup_name_value"
+    cluster.network = "network_value"
+
+    request = alloydb_v1beta.CreateSecondaryClusterRequest(
+        parent="parent_value",
+        cluster_id="cluster_id_value",
+        cluster=cluster,
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.create_secondary_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_sync.py
new file mode 100644
index 000000000000..fa1db1bfbffc
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateSecondaryCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_create_secondary_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_secondary_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_async.py new file mode 100644 index 000000000000..de1050dae1c4 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecondaryInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_create_secondary_instance():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = alloydb_v1beta.Instance()
+    instance.instance_type = "SECONDARY"
+
+    request = alloydb_v1beta.CreateSecondaryInstanceRequest(
+        parent="parent_value",
+        instance_id="instance_id_value",
+        instance=instance,
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.create_secondary_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_sync.py
new file mode 100644
index 000000000000..5a13e3995947
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateSecondaryInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_create_secondary_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.CreateSecondaryInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_secondary_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_backup_async.py new file mode 100644 index 000000000000..590dcc2a6f8e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
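+#   (If you prefer to bound the wait below, operation.result() accepts a
+#   timeout in seconds; a sketch:
+#       response = await operation.result(timeout=300)
+#   )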
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_delete_backup():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.DeleteBackupRequest(
+        name="name_value",
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.delete_backup(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_DeleteBackup_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_backup_sync.py
new file mode 100644
index 000000000000..db32fc62f0c8
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_backup_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteBackup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_DeleteBackup_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_delete_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_DeleteBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_cluster_async.py new file mode 100644 index 000000000000..2d63d03d2a3a --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_DeleteCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_delete_cluster():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.DeleteClusterRequest(
+        name="name_value",
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.delete_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_DeleteCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_cluster_sync.py
new file mode 100644
index 000000000000..5bab4fee6de8
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_cluster_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_DeleteCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_delete_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.DeleteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_DeleteCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_instance_async.py new file mode 100644 index 000000000000..77394b96d628 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_delete_instance():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.DeleteInstanceRequest(
+        name="name_value",
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.delete_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_DeleteInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_instance_sync.py
new file mode 100644
index 000000000000..08937b417f9c
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_delete_instance_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_DeleteInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_delete_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_DeleteInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_failover_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_failover_instance_async.py new file mode 100644 index 000000000000..3c7c89563484 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_failover_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FailoverInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_FailoverInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
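+#   (Fields named in the method's flattened signature can be passed as
+#   keywords instead of a request object; a sketch, assuming "name" is in
+#   that signature:
+#       operation = await client.failover_instance(name="name_value")
+#   )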
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_failover_instance():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.FailoverInstanceRequest(
+        name="name_value",
+    )
+
+    # Make the request; awaiting the call yields the operation object
+    operation = await client.failover_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_FailoverInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_failover_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_failover_instance_sync.py
new file mode 100644
index 000000000000..eaaee6cfb8cb
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_failover_instance_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for FailoverInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_FailoverInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_failover_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.FailoverInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_FailoverInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_async.py new file mode 100644 index 000000000000..c96afe34a50b --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateClientCertificate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GenerateClientCertificate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
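+#   (Unlike the long-running operations above, this RPC returns its response
+#   directly, so there is no operation to poll; reading a field might look
+#   like the following, where the field name is an assumption:
+#       print(response.pem_certificate_chain)
+#   )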
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.generate_client_certificate(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GenerateClientCertificate_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_sync.py new file mode 100644 index 000000000000..b82c482a8edc --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateClientCertificate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GenerateClientCertificate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_generate_client_certificate(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GenerateClientCertificateRequest( + parent="parent_value", + ) + + # Make the request + response = client.generate_client_certificate(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GenerateClientCertificate_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_backup_async.py new file mode 100644 index 000000000000..740a0655c7fc --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
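+#   (The "name_value" placeholder below can be produced with the generated
+#   path helper, assuming the usual (project, location, backup) ordering:
+#       name = alloydb_v1beta.AlloyDBAdminClient.backup_path(
+#           "my-project", "us-central1", "my-backup"
+#       )
+#   )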
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_get_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetBackup_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_backup_sync.py new file mode 100644 index 000000000000..fd573d542c7e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_get_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_cluster_async.py new file mode 100644 index 000000000000..de7ce4a5e104 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_cluster_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_get_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_cluster_sync.py new file mode 100644 index 000000000000..9ca2a70b4466 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
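+#   (A sketch for filling in "name_value" below, assuming the standard
+#   cluster path helper:
+#       name = alloydb_v1beta.AlloyDBAdminClient.cluster_path(
+#           "my-project", "us-central1", "my-cluster"
+#       )
+#   )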
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_get_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_connection_info_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_connection_info_async.py new file mode 100644 index 000000000000..ccf346f83b13 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_connection_info_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetConnectionInfo_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
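+#   In GetConnectionInfoRequest, `parent` names the instance whose connection
+#   info is wanted. A hedged sketch (hypothetical IDs, built with the
+#   generated path helper on the client):
+#
+#       from google.cloud import alloydb_v1beta
+#
+#       # projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}
+#       parent = alloydb_v1beta.AlloyDBAdminClient.instance_path(
+#           "my-project", "us-central1", "my-cluster", "my-instance",
+#       )
+#       request = alloydb_v1beta.GetConnectionInfoRequest(parent=parent)
+#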
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_get_connection_info(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_connection_info(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetConnectionInfo_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_connection_info_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_connection_info_sync.py new file mode 100644 index 000000000000..6e0e2744a636 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_connection_info_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnectionInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetConnectionInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
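+#   On the endpoint point raised in the next bullet, a hedged sketch of
+#   passing an explicit endpoint through client_options (the host shown is
+#   the service's default; substitute the endpoint your deployment needs):
+#
+#       from google.api_core.client_options import ClientOptions
+#       from google.cloud import alloydb_v1beta
+#
+#       # Direct all requests at an explicitly chosen service endpoint.
+#       client = alloydb_v1beta.AlloyDBAdminClient(
+#           client_options=ClientOptions(api_endpoint="alloydb.googleapis.com"),
+#       )
+#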
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_get_connection_info(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetConnectionInfoRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_connection_info(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetConnectionInfo_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_instance_async.py new file mode 100644 index 000000000000..4cccf09f97c9 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
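+#   The async samples define a coroutine, so running one as a script needs an
+#   event loop; a minimal sketch:
+#
+#       import asyncio
+#
+#       # Drive the coroutine defined below to completion.
+#       asyncio.run(sample_get_instance())
+#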
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_get_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetInstance_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_instance_sync.py new file mode 100644 index 000000000000..a7231edc1e9c --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_get_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_GetInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_backups_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_backups_async.py new file mode 100644 index 000000000000..c9bc439c0a9f --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
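+#   The list call below returns an async pager that fetches further pages
+#   lazily as you iterate. A hedged sketch of tuning page_size and walking
+#   page by page (a standard pager feature; the size chosen is arbitrary):
+#
+#       from google.cloud import alloydb_v1beta
+#
+#       async def print_page_sizes(client):
+#           request = alloydb_v1beta.ListBackupsRequest(
+#               parent="parent_value",
+#               page_size=50,  # the server may cap this value
+#           )
+#           pager = await client.list_backups(request=request)
+#           # Iterate whole pages instead of individual backups.
+#           async for page in pager.pages:
+#               print(len(page.backups))
+#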
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_list_backups():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.ListBackupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # pager)
+    page_result = await client.list_backups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_ListBackups_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_backups_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_backups_sync.py
new file mode 100644
index 000000000000..d5d1f7df4ee7
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_backups_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_ListBackups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_list_backups(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_ListBackups_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_clusters_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_clusters_async.py new file mode 100644 index 000000000000..97d2f5ad7b58 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_clusters_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_ListClusters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
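+#   Here `parent` is a location resource. The AlloyDB API documents a "-"
+#   wildcard for listing clusters across all locations; a hedged sketch
+#   (hypothetical project ID; verify wildcard support for your version):
+#
+#       from google.cloud import alloydb_v1beta
+#
+#       # "-" in place of a location ID requests clusters from every region.
+#       request = alloydb_v1beta.ListClustersRequest(
+#           parent="projects/my-project/locations/-",
+#       )
+#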
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_list_clusters():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.ListClustersRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # pager)
+    page_result = await client.list_clusters(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_ListClusters_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_clusters_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_clusters_sync.py
new file mode 100644
index 000000000000..2b9c3be9ab20
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_clusters_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListClusters
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_ListClusters_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_list_clusters(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_ListClusters_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_instances_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_instances_async.py new file mode 100644 index 000000000000..fced2d7e2de9 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_instances_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_list_instances():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.ListInstancesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # pager)
+    page_result = await client.list_instances(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_ListInstances_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_instances_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_instances_sync.py
new file mode 100644
index 000000000000..ffc4f9b0d6a0
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_instances_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstances
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_ListInstances_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_list_instances(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_ListInstances_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_async.py new file mode 100644 index 000000000000..097152252e59 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSupportedDatabaseFlags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
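+#   For ListSupportedDatabaseFlagsRequest, `parent` is a location; a hedged
+#   sketch with hypothetical IDs:
+#
+#       from google.cloud import alloydb_v1beta
+#
+#       # projects/{project}/locations/{location}
+#       request = alloydb_v1beta.ListSupportedDatabaseFlagsRequest(
+#           parent="projects/my-project/locations/us-central1",
+#       )
+#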
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_list_supported_database_flags():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.ListSupportedDatabaseFlagsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # pager)
+    page_result = await client.list_supported_database_flags(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_sync.py
new file mode 100644
index 000000000000..164c86ff4223
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSupportedDatabaseFlags
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_list_supported_database_flags(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ListSupportedDatabaseFlagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_supported_database_flags(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_promote_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_promote_cluster_async.py new file mode 100644 index 000000000000..3486a319bce5 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_promote_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PromoteCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_PromoteCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
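+#   PromoteCluster is a long-running operation; on the async client both the
+#   RPC and the terminal result are awaited, as the body below does. A
+#   condensed sketch of the same pattern:
+#
+#       async def promote(client, request):
+#           # Awaiting the RPC yields the operation handle ...
+#           operation = await client.promote_cluster(request=request)
+#           # ... and awaiting result() yields the finished resource.
+#           return await operation.result()
+#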
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_promote_cluster():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.PromoteClusterRequest(
+        name="name_value",
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # operation)
+    operation = await client.promote_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_PromoteCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_promote_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_promote_cluster_sync.py
new file mode 100644
index 000000000000..746812b418eb
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_promote_cluster_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for PromoteCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_PromoteCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_promote_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.PromoteClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.promote_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_PromoteCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restart_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restart_instance_async.py new file mode 100644 index 000000000000..ec84cc1cea3c --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restart_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_RestartInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_restart_instance():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = alloydb_v1beta.RestartInstanceRequest(
+        name="name_value",
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # operation)
+    operation = await client.restart_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_RestartInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restart_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restart_instance_sync.py
new file mode 100644
index 000000000000..a342c1767767
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restart_instance_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RestartInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_RestartInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
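+#   The blocking result() call in the sample below polls until the operation
+#   finishes. A hedged sketch of bounding that wait, replacing the sample's
+#   `response = operation.result()` line (timeout is in seconds, per
+#   google.api_core.operation.Operation.result):
+#
+#       # Give up after five minutes instead of waiting indefinitely.
+#       response = operation.result(timeout=300)
+#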
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_restart_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.RestartInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.restart_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_RestartInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restore_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restore_cluster_async.py new file mode 100644 index 000000000000..81b53488f654 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restore_cluster_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_RestoreCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
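+#   The sample below fills its restore source via backup_source; because the
+#   source is a proto oneof, assigning one source member clears any other. A
+#   hedged sketch of the same request built with keyword arguments only:
+#
+#       from google.cloud import alloydb_v1beta
+#
+#       request = alloydb_v1beta.RestoreClusterRequest(
+#           backup_source=alloydb_v1beta.BackupSource(
+#               backup_name="backup_name_value",
+#           ),
+#           parent="parent_value",
+#           cluster_id="cluster_id_value",
+#           cluster=alloydb_v1beta.Cluster(network="network_value"),
+#       )
+#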
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_restore_cluster():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    backup_source = alloydb_v1beta.BackupSource()
+    backup_source.backup_name = "backup_name_value"
+
+    cluster = alloydb_v1beta.Cluster()
+    cluster.backup_source.backup_name = "backup_name_value"
+    cluster.network = "network_value"
+
+    request = alloydb_v1beta.RestoreClusterRequest(
+        backup_source=backup_source,
+        parent="parent_value",
+        cluster_id="cluster_id_value",
+        cluster=cluster,
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # operation)
+    operation = await client.restore_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_RestoreCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restore_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restore_cluster_sync.py
new file mode 100644
index 000000000000..2c6d6d3899a5
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_restore_cluster_sync.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RestoreCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_RestoreCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_restore_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + backup_source = alloydb_v1beta.BackupSource() + backup_source.backup_name = "backup_name_value" + + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.RestoreClusterRequest( + backup_source=backup_source, + parent="parent_value", + cluster_id="cluster_id_value", + cluster=cluster, + ) + + # Make the request + operation = client.restore_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_RestoreCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_backup_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_backup_async.py new file mode 100644 index 000000000000..418b578720a6 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_backup_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
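+#   Update requests accept an optional FieldMask restricting which fields the
+#   server writes; a hedged sketch (the "labels" path is illustrative only):
+#
+#       from google.cloud import alloydb_v1beta
+#       from google.protobuf import field_mask_pb2
+#
+#       backup = alloydb_v1beta.Backup(cluster_name="cluster_name_value")
+#       request = alloydb_v1beta.UpdateBackupRequest(
+#           backup=backup,
+#           # Only the masked fields are modified on the server.
+#           update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+#       )
+#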
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_update_backup():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    backup = alloydb_v1beta.Backup()
+    backup.cluster_name = "cluster_name_value"
+
+    request = alloydb_v1beta.UpdateBackupRequest(
+        backup=backup,
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # operation)
+    operation = await client.update_backup(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_UpdateBackup_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_backup_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_backup_sync.py
new file mode 100644
index 000000000000..4958dd4738a5
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_backup_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateBackup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_UpdateBackup_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_update_backup(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + backup = alloydb_v1beta.Backup() + backup.cluster_name = "cluster_name_value" + + request = alloydb_v1beta.UpdateBackupRequest( + backup=backup, + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_UpdateBackup_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_cluster_async.py new file mode 100644 index 000000000000..185a594755dc --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_cluster_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_UpdateCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
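+#   Some surface versions expose a dry-run flag on UpdateClusterRequest; a
+#   hedged sketch (confirm validate_only exists in your version before use):
+#
+#       from google.cloud import alloydb_v1beta
+#
+#       cluster = alloydb_v1beta.Cluster(network="network_value")
+#       request = alloydb_v1beta.UpdateClusterRequest(
+#           cluster=cluster,
+#           validate_only=True,  # validate the request without applying it
+#       )
+#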
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_update_cluster():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    cluster = alloydb_v1beta.Cluster()
+    cluster.backup_source.backup_name = "backup_name_value"
+    cluster.network = "network_value"
+
+    request = alloydb_v1beta.UpdateClusterRequest(
+        cluster=cluster,
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # operation)
+    operation = await client.update_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_UpdateCluster_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_cluster_sync.py
new file mode 100644
index 000000000000..5a9581766d2b
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_cluster_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_UpdateCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_update_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + cluster = alloydb_v1beta.Cluster() + cluster.backup_source.backup_name = "backup_name_value" + cluster.network = "network_value" + + request = alloydb_v1beta.UpdateClusterRequest( + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_UpdateCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_instance_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_instance_async.py new file mode 100644 index 000000000000..c668bcfdb602 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_instance_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
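+#   The plain string "SECONDARY" assigned below is coerced by proto-plus to
+#   the Instance.InstanceType enum; the explicit spelling is:
+#
+#       from google.cloud import alloydb_v1beta
+#
+#       instance = alloydb_v1beta.Instance(
+#           instance_type=alloydb_v1beta.Instance.InstanceType.SECONDARY,
+#       )
+#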
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import alloydb_v1beta
+
+
+async def sample_update_instance():
+    # Create a client
+    client = alloydb_v1beta.AlloyDBAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = alloydb_v1beta.Instance()
+    instance.instance_type = "SECONDARY"
+
+    request = alloydb_v1beta.UpdateInstanceRequest(
+        instance=instance,
+    )
+
+    # Make the request (the async method is a coroutine; await it to get the
+    # operation)
+    operation = await client.update_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END alloydb_v1beta_generated_AlloyDBAdmin_UpdateInstance_async]
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_instance_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_instance_sync.py
new file mode 100644
index 000000000000..a6df7a068065
--- /dev/null
+++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_update_instance_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-alloydb
+
+
+# [START alloydb_v1beta_generated_AlloyDBAdmin_UpdateInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_update_instance(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + instance = alloydb_v1beta.Instance() + instance.instance_type = "SECONDARY" + + request = alloydb_v1beta.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_UpdateInstance_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json new file mode 100644 index 000000000000..1939c5e9657e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -0,0 +1,3291 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.alloydb.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-alloydb", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.batch_create_instances", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.BatchCreateInstances", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "BatchCreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.BatchCreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_instances" + }, + "description": "Sample for BatchCreateInstances", + "file": "alloydb_v1_generated_alloy_db_admin_batch_create_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_BatchCreateInstances_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_batch_create_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.batch_create_instances", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.BatchCreateInstances", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": 
"BatchCreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.BatchCreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_instances" + }, + "description": "Sample for BatchCreateInstances", + "file": "alloydb_v1_generated_alloy_db_admin_batch_create_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_BatchCreateInstances_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_batch_create_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.create_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.CreateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "alloydb_v1_generated_alloy_db_admin_create_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_CreateBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_create_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.create_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.CreateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.CreateBackupRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "alloydb_v1_generated_alloy_db_admin_create_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_CreateBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_create_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.create_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.CreateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "alloydb_v1_generated_alloy_db_admin_create_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_CreateCluster_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_create_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.create_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.CreateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": 
"str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "alloydb_v1_generated_alloy_db_admin_create_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_CreateCluster_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_create_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.create_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.CreateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "alloydb_v1_generated_alloy_db_admin_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_CreateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.create_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.CreateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + 
"type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "alloydb_v1_generated_alloy_db_admin_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_CreateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.DeleteBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "alloydb_v1_generated_alloy_db_admin_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.delete_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.DeleteBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "alloydb_v1_generated_alloy_db_admin_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.delete_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.DeleteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "alloydb_v1_generated_alloy_db_admin_delete_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_DeleteCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_delete_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.delete_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.DeleteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "alloydb_v1_generated_alloy_db_admin_delete_cluster_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_DeleteCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_delete_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.delete_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.DeleteInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "alloydb_v1_generated_alloy_db_admin_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_DeleteInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.delete_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.DeleteInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "alloydb_v1_generated_alloy_db_admin_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.failover_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.FailoverInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "FailoverInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "failover_instance" + }, + "description": "Sample for FailoverInstance", + "file": "alloydb_v1_generated_alloy_db_admin_failover_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_FailoverInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_failover_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.failover_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.FailoverInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "FailoverInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "failover_instance" + }, + "description": "Sample for FailoverInstance", + "file": "alloydb_v1_generated_alloy_db_admin_failover_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_FailoverInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_failover_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.get_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.GetBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "alloydb_v1_generated_alloy_db_admin_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.get_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.GetBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "alloydb_v1_generated_alloy_db_admin_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": 
"google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.get_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.GetCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "alloydb_v1_generated_alloy_db_admin_get_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_GetCluster_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_get_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.get_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.GetCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "alloydb_v1_generated_alloy_db_admin_get_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_GetCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_get_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.get_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.GetInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.alloydb_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "alloydb_v1_generated_alloy_db_admin_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.get_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.GetInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "alloydb_v1_generated_alloy_db_admin_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListBackups", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "alloydb_v1_generated_alloy_db_admin_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.list_backups", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListBackups", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "alloydb_v1_generated_alloy_db_admin_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.list_clusters", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListClusters", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListClustersAsyncPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "alloydb_v1_generated_alloy_db_admin_list_clusters_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListClusters_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_clusters_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.list_clusters", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListClusters", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListClustersPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "alloydb_v1_generated_alloy_db_admin_list_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.list_instances", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListInstances", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "alloydb_v1_generated_alloy_db_admin_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.list_instances", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListInstances", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "alloydb_v1_generated_alloy_db_admin_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.list_supported_database_flags", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListSupportedDatabaseFlags", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListSupportedDatabaseFlags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsAsyncPager", + "shortName": "list_supported_database_flags" + }, + "description": "Sample for ListSupportedDatabaseFlags", + "file": "alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.list_supported_database_flags", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.ListSupportedDatabaseFlags", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListSupportedDatabaseFlags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.ListSupportedDatabaseFlagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsPager", + "shortName": "list_supported_database_flags" + }, + "description": "Sample for ListSupportedDatabaseFlags", + "file": "alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_list_supported_database_flags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.restart_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.RestartInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestartInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.RestartInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_instance" + }, + "description": "Sample for RestartInstance", + "file": "alloydb_v1_generated_alloy_db_admin_restart_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_RestartInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_restart_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.restart_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.RestartInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestartInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.RestartInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_instance" + }, + "description": "Sample for RestartInstance", + "file": "alloydb_v1_generated_alloy_db_admin_restart_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_RestartInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_restart_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.restore_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.RestoreCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestoreCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.RestoreClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_cluster" + }, + "description": "Sample for RestoreCluster", + "file": "alloydb_v1_generated_alloy_db_admin_restore_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_RestoreCluster_async", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_restore_cluster_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient.restore_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.RestoreCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestoreCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.RestoreClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_cluster" + }, + "description": "Sample for RestoreCluster", + "file": "alloydb_v1_generated_alloy_db_admin_restore_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_RestoreCluster_sync", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_restore_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.update_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.UpdateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "alloydb_v1_generated_alloy_db_admin_update_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_UpdateBackup_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_update_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": 
"google.cloud.alloydb_v1.AlloyDBAdminClient.update_backup", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.UpdateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "alloydb_v1_generated_alloy_db_admin_update_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_UpdateBackup_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_update_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.update_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.UpdateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "alloydb_v1_generated_alloy_db_admin_update_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_UpdateCluster_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_update_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": 
"google.cloud.alloydb_v1.AlloyDBAdminClient.update_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.UpdateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "alloydb_v1_generated_alloy_db_admin_update_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_UpdateCluster_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_update_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminAsyncClient.update_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.UpdateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "alloydb_v1_generated_alloy_db_admin_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_UpdateInstance_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": 
"google.cloud.alloydb_v1.AlloyDBAdminClient.update_instance", + "method": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin.UpdateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "alloydb_v1_generated_alloy_db_admin_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1_generated_AlloyDBAdmin_UpdateInstance_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1_generated_alloy_db_admin_update_instance_sync.py" + } + ] +} diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json new file mode 100644 index 000000000000..1c84867cd14d --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -0,0 +1,4128 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.alloydb.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-cloud-alloydb", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.batch_create_instances", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.BatchCreateInstances", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "BatchCreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.BatchCreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_instances" + }, + "description": "Sample for BatchCreateInstances", + "file": "alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_BatchCreateInstances_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, 
+ "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.batch_create_instances", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.BatchCreateInstances", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "BatchCreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.BatchCreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_instances" + }, + "description": "Sample for BatchCreateInstances", + "file": "alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_BatchCreateInstances_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_batch_create_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.create_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1alpha.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, 
+ "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.create_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1alpha.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.create_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1alpha.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateCluster_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + 
"end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.create_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1alpha.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateCluster_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.create_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1alpha.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"alloydb_v1alpha_generated_AlloyDBAdmin_CreateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.create_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1alpha.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.create_secondary_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateSecondaryCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateSecondaryClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1alpha.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_secondary_cluster" + }, + "description": "Sample for CreateSecondaryCluster", + 
"file": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryCluster_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.create_secondary_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateSecondaryCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateSecondaryClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1alpha.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_secondary_cluster" + }, + "description": "Sample for CreateSecondaryCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryCluster_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.create_secondary_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateSecondaryInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateSecondaryInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1alpha.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_secondary_instance" + }, + "description": "Sample for CreateSecondaryInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.create_secondary_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateSecondaryInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateSecondaryInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1alpha.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_secondary_instance" + }, + "description": "Sample for CreateSecondaryInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateSecondaryInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_secondary_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.DeleteBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.delete_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.DeleteBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.delete_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.DeleteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_DeleteCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.delete_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.DeleteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_DeleteCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_delete_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.delete_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.DeleteInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": 
"alloydb_v1alpha_generated_alloy_db_admin_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_DeleteInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.delete_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.DeleteInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.failover_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.FailoverInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "FailoverInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "failover_instance" + }, + "description": "Sample for FailoverInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_failover_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"alloydb_v1alpha_generated_AlloyDBAdmin_FailoverInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_failover_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.failover_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.FailoverInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "FailoverInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "failover_instance" + }, + "description": "Sample for FailoverInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_failover_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_FailoverInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_failover_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.generate_client_certificate", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GenerateClientCertificate", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GenerateClientCertificate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GenerateClientCertificateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.GenerateClientCertificateResponse", + "shortName": "generate_client_certificate" + }, + "description": "Sample for GenerateClientCertificate", + "file": "alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"alloydb_v1alpha_generated_AlloyDBAdmin_GenerateClientCertificate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.generate_client_certificate", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GenerateClientCertificate", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GenerateClientCertificate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GenerateClientCertificateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.GenerateClientCertificateResponse", + "shortName": "generate_client_certificate" + }, + "description": "Sample for GenerateClientCertificate", + "file": "alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GenerateClientCertificate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_generate_client_certificate_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.get_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetBackup_async", 
+ "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.get_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.get_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetCluster_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_get_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.get_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_get_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.get_connection_info", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetConnectionInfo", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetConnectionInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetConnectionInfoRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.ConnectionInfo", + "shortName": "get_connection_info" + }, + "description": "Sample for GetConnectionInfo", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetConnectionInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.get_connection_info", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetConnectionInfo", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetConnectionInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetConnectionInfoRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.ConnectionInfo", + "shortName": "get_connection_info" + }, + "description": "Sample for GetConnectionInfo", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetConnectionInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_get_connection_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.get_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.get_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.GetInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListBackups", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.list_backups", + "method": { + 
"fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListBackups", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.list_clusters", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListClusters", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListClustersAsyncPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListClusters_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_clusters_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.list_clusters", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListClusters", + "service": { + "fullName": 
"google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListClustersPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.list_instances", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListInstances", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.list_instances", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListInstances", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListInstances" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.list_supported_database_flags", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListSupportedDatabaseFlags", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListSupportedDatabaseFlags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsAsyncPager", + "shortName": "list_supported_database_flags" + }, + "description": "Sample for ListSupportedDatabaseFlags", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.list_supported_database_flags", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListSupportedDatabaseFlags", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": 
"AlloyDBAdmin" + }, + "shortName": "ListSupportedDatabaseFlags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListSupportedDatabaseFlagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsPager", + "shortName": "list_supported_database_flags" + }, + "description": "Sample for ListSupportedDatabaseFlags", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_supported_database_flags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.promote_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.PromoteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "PromoteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.PromoteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "promote_cluster" + }, + "description": "Sample for PromoteCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_PromoteCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.promote_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.PromoteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + 
"shortName": "PromoteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.PromoteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "promote_cluster" + }, + "description": "Sample for PromoteCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_PromoteCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_promote_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.restart_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.RestartInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestartInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.RestartInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_instance" + }, + "description": "Sample for RestartInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_restart_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_RestartInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_restart_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.restart_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.RestartInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestartInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.RestartInstanceRequest" + }, + { + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_instance" + }, + "description": "Sample for RestartInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_restart_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_RestartInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_restart_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.restore_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.RestoreCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestoreCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.RestoreClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_cluster" + }, + "description": "Sample for RestoreCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_RestoreCluster_async", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.restore_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.RestoreCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestoreCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.RestoreClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "restore_cluster" + }, + "description": "Sample for RestoreCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_RestoreCluster_sync", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.update_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpdateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1alpha.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_update_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpdateBackup_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_update_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.update_backup", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpdateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1alpha.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "alloydb_v1alpha_generated_alloy_db_admin_update_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpdateBackup_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_update_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.update_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpdateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1alpha.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_update_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpdateCluster_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_update_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.update_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpdateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1alpha.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_update_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpdateCluster_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_update_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.update_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpdateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1alpha.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpdateInstance_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.update_instance", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpdateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1alpha.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "alloydb_v1alpha_generated_alloy_db_admin_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpdateInstance_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_update_instance_sync.py" + } + ] +} diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json new file mode 100644 index 000000000000..eb79e45aaf44 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -0,0 +1,4128 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.alloydb.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-cloud-alloydb", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.batch_create_instances", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.BatchCreateInstances", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "BatchCreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.BatchCreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_instances" + }, + "description": "Sample for BatchCreateInstances", + "file": "alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_BatchCreateInstances_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": 
"google.cloud.alloydb_v1beta.AlloyDBAdminClient.batch_create_instances", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.BatchCreateInstances", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "BatchCreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.BatchCreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_instances" + }, + "description": "Sample for BatchCreateInstances", + "file": "alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_BatchCreateInstances_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_batch_create_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.create_backup", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1beta.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.create_backup", + 
"method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1beta.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.create_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1beta.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateCluster_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": 
"google.cloud.alloydb_v1beta.AlloyDBAdminClient.create_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1beta.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cluster" + }, + "description": "Sample for CreateCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateCluster_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.create_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1beta.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.create_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1beta.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.create_secondary_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateSecondaryCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateSecondaryClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1beta.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_secondary_cluster" + }, + "description": "Sample for CreateSecondaryCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryCluster_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.create_secondary_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateSecondaryCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateSecondaryClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1beta.types.Cluster" + }, + { + "name": "cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_secondary_cluster" + }, + "description": "Sample for CreateSecondaryCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryCluster_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_secondary_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.create_secondary_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateSecondaryInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateSecondaryInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1beta.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_secondary_instance" + }, + "description": "Sample for CreateSecondaryInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.create_secondary_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateSecondaryInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateSecondaryInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateSecondaryInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1beta.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_secondary_instance" + }, + "description": "Sample for CreateSecondaryInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateSecondaryInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_secondary_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.DeleteBackup", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.delete_backup", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.DeleteBackup", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.delete_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.DeleteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_delete_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_DeleteCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, 
+ "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_delete_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.delete_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.DeleteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_delete_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_DeleteCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_delete_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.delete_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.DeleteInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_DeleteInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.delete_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.DeleteInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.failover_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.FailoverInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "FailoverInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "failover_instance" + }, + "description": "Sample for FailoverInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_failover_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_FailoverInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_failover_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.failover_instance", + 
"method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.FailoverInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "FailoverInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "failover_instance" + }, + "description": "Sample for FailoverInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_failover_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_FailoverInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_failover_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.generate_client_certificate", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GenerateClientCertificate", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GenerateClientCertificate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GenerateClientCertificateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.GenerateClientCertificateResponse", + "shortName": "generate_client_certificate" + }, + "description": "Sample for GenerateClientCertificate", + "file": "alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GenerateClientCertificate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.generate_client_certificate", + "method": { + "fullName": 
"google.cloud.alloydb.v1beta.AlloyDBAdmin.GenerateClientCertificate", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GenerateClientCertificate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GenerateClientCertificateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.GenerateClientCertificateResponse", + "shortName": "generate_client_certificate" + }, + "description": "Sample for GenerateClientCertificate", + "file": "alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GenerateClientCertificate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_generate_client_certificate_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.get_backup", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetBackup", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.get_backup", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetBackup", + "service": { + "fullName": 
"google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.get_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_get_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetCluster_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.get_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_get_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.get_connection_info", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetConnectionInfo", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetConnectionInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetConnectionInfoRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.ConnectionInfo", + "shortName": "get_connection_info" + }, + "description": "Sample for GetConnectionInfo", + "file": "alloydb_v1beta_generated_alloy_db_admin_get_connection_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetConnectionInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_connection_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.get_connection_info", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetConnectionInfo", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetConnectionInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetConnectionInfoRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.ConnectionInfo", + "shortName": "get_connection_info" + }, + "description": "Sample for GetConnectionInfo", + "file": "alloydb_v1beta_generated_alloy_db_admin_get_connection_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetConnectionInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_connection_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.get_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.get_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.GetInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": 
"alloydb_v1beta_generated_alloy_db_admin_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListBackups", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.list_backups", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListBackups", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"alloydb_v1beta_generated_AlloyDBAdmin_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.list_clusters", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListClusters", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListClustersAsyncPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ListClusters_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_clusters_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.list_clusters", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListClusters", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListClustersPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ListClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.list_instances", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListInstances", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.list_instances", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListInstances", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.list_supported_database_flags", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListSupportedDatabaseFlags", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListSupportedDatabaseFlags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsAsyncPager", + "shortName": "list_supported_database_flags" + }, + "description": "Sample for ListSupportedDatabaseFlags", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.list_supported_database_flags", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ListSupportedDatabaseFlags", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListSupportedDatabaseFlags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ListSupportedDatabaseFlagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListSupportedDatabaseFlagsPager", + "shortName": "list_supported_database_flags" + }, + "description": "Sample for ListSupportedDatabaseFlags", + "file": "alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ListSupportedDatabaseFlags_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 
52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_list_supported_database_flags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.promote_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.PromoteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "PromoteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.PromoteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "promote_cluster" + }, + "description": "Sample for PromoteCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_promote_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_PromoteCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_promote_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.promote_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.PromoteCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "PromoteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.PromoteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "promote_cluster" + }, + "description": "Sample for PromoteCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_promote_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_PromoteCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" 
+ }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_promote_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.restart_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.RestartInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestartInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.RestartInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_instance" + }, + "description": "Sample for RestartInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_restart_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_RestartInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_restart_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.restart_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.RestartInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestartInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.RestartInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_instance" + }, + "description": "Sample for RestartInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_restart_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_RestartInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"alloydb_v1beta_generated_alloy_db_admin_restart_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.restore_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.RestoreCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestoreCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.RestoreClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_cluster" + }, + "description": "Sample for RestoreCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_restore_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_RestoreCluster_async", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_restore_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.restore_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.RestoreCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "RestoreCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.RestoreClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_cluster" + }, + "description": "Sample for RestoreCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_restore_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_RestoreCluster_sync", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_restore_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": 
"google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.update_backup", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpdateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1beta.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_update_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpdateBackup_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_update_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.update_backup", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpdateBackup", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.alloydb_v1beta.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "alloydb_v1beta_generated_alloy_db_admin_update_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpdateBackup_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_update_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": 
"AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.update_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpdateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1beta.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_update_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpdateCluster_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_update_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.update_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpdateCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpdateClusterRequest" + }, + { + "name": "cluster", + "type": "google.cloud.alloydb_v1beta.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cluster" + }, + "description": "Sample for UpdateCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_update_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpdateCluster_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_update_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.update_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpdateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1beta.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpdateInstance_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.update_instance", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpdateInstance", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.alloydb_v1beta.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "alloydb_v1beta_generated_alloy_db_admin_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpdateInstance_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_update_instance_sync.py" + } + ] +} diff --git 
a/packages/google-cloud-alloydb/scripts/decrypt-secrets.sh b/packages/google-cloud-alloydb/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-alloydb/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1_keywords.py new file mode 100644 index 000000000000..cff31470f722 --- /dev/null +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1_keywords.py @@ -0,0 +1,195 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
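The decrypt-secrets.sh script above pulls each secret with the gcloud CLI. A minimal Python sketch of the same lookup follows for orientation; it assumes the google-cloud-secret-manager package is installed, and the secret and project names simply mirror the ones the script uses:

    # Fetch one test secret the way decrypt-secrets.sh does, but through the
    # Secret Manager client library instead of the gcloud CLI.
    import os

    from google.cloud import secretmanager

    # Same fallback the script applies via SECRET_MANAGER_PROJECT.
    project_id = os.environ.get("SECRET_MANAGER_PROJECT", "cloud-devrel-kokoro-resources")

    client = secretmanager.SecretManagerServiceClient()
    name = f"projects/{project_id}/secrets/python-docs-samples-test-env/versions/latest"
    payload = client.access_secret_version(request={"name": name}).payload.data

    with open("testing/test-env.sh", "wb") as fh:
        fh.write(payload)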
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class alloydbCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_instances': ('parent', 'requests', 'request_id', ), + 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', 'validate_only', ), + 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), + 'delete_backup': ('name', 'request_id', 'validate_only', 'etag', ), + 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), + 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), + 'failover_instance': ('name', 'request_id', 'validate_only', ), + 'get_backup': ('name', ), + 'get_cluster': ('name', ), + 'get_instance': ('name', 'view', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_clusters': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_supported_database_flags': ('parent', 'page_size', 'page_token', ), + 'restart_instance': ('name', 'request_id', 'validate_only', ), + 'restore_cluster': ('parent', 'cluster_id', 'cluster', 'backup_source', 'request_id', 'validate_only', ), + 'update_backup': ('backup', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'update_cluster': ('cluster', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'update_instance': ('instance', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
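+ # (Because zip() pairs parameter names with args + kwargs purely by
+ # position, the keyword names carried on kwargs are ignored here; keyword
+ # arguments are expected to appear in the canonical METHOD_TO_PARAMS order.)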
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=alloydbCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the alloydb client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py new file mode 100644 index 000000000000..0776936968a8 --- /dev/null +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py @@ -0,0 +1,200 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
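With the v1 fixup script now complete, a note on running it: libcst must be importable, the input directory must exist, and the output directory must exist and be empty, or the script exits early. A hypothetical invocation (paths are illustrative) is: python fixup_alloydb_v1_keywords.py --input-directory old_samples --output-directory fixed_samples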
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class alloydbCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_instances': ('parent', 'requests', 'request_id', ), + 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', 'validate_only', ), + 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), + 'create_secondary_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_secondary_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), + 'delete_backup': ('name', 'request_id', 'validate_only', 'etag', ), + 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), + 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), + 'failover_instance': ('name', 'request_id', 'validate_only', ), + 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', ), + 'get_backup': ('name', ), + 'get_cluster': ('name', ), + 'get_connection_info': ('parent', 'request_id', ), + 'get_instance': ('name', 'view', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_clusters': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_supported_database_flags': ('parent', 'page_size', 'page_token', ), + 'promote_cluster': ('name', 'request_id', 'etag', 'validate_only', ), + 'restart_instance': ('name', 'request_id', 'validate_only', ), + 'restore_cluster': ('parent', 'cluster_id', 'cluster', 'backup_source', 'continuous_backup_source', 'request_id', 'validate_only', ), + 'update_backup': ('backup', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'update_cluster': ('cluster', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'update_instance': ('instance', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
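+ # (Illustration with hypothetical values:
+ # client.get_cluster("projects/p/locations/l/clusters/c", timeout=5.0)
+ # becomes client.get_cluster(request={'name': ...}, timeout=5.0), since
+ # 'name' is the only entry for get_cluster and timeout is a control param.)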
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=alloydbCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the alloydb client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py new file mode 100644 index 000000000000..0776936968a8 --- /dev/null +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py @@ -0,0 +1,200 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class alloydbCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_instances': ('parent', 'requests', 'request_id', ), + 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', 'validate_only', ), + 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), + 'create_secondary_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_secondary_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), + 'delete_backup': ('name', 'request_id', 'validate_only', 'etag', ), + 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), + 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), + 'failover_instance': ('name', 'request_id', 'validate_only', ), + 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', ), + 'get_backup': ('name', ), + 'get_cluster': ('name', ), + 'get_connection_info': ('parent', 'request_id', ), + 'get_instance': ('name', 'view', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_clusters': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_supported_database_flags': ('parent', 'page_size', 'page_token', ), + 'promote_cluster': ('name', 'request_id', 'etag', 'validate_only', ), + 'restart_instance': ('name', 'request_id', 'validate_only', ), + 'restore_cluster': ('parent', 'cluster_id', 'cluster', 'backup_source', 'continuous_backup_source', 'request_id', 'validate_only', ), + 'update_backup': ('backup', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'update_cluster': ('cluster', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'update_instance': ('instance', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
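+ # (A "request" keyword is the marker that a call is already in the new
+ # style, which makes the transform safe to run repeatedly.)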
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=alloydbCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the alloydb client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-alloydb/setup.py b/packages/google-cloud-alloydb/setup.py new file mode 100644 index 000000000000..6789b614d3be --- /dev/null +++ b/packages/google-cloud-alloydb/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-alloydb" + + +description = "Google Cloud Alloydb API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/alloydb/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming 
Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-alloydb/testing/.gitignore b/packages/google-cloud-alloydb/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-alloydb/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-alloydb/testing/constraints-3.10.txt b/packages/google-cloud-alloydb/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-alloydb/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-alloydb/testing/constraints-3.11.txt b/packages/google-cloud-alloydb/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-alloydb/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-alloydb/testing/constraints-3.12.txt b/packages/google-cloud-alloydb/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-alloydb/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-alloydb/testing/constraints-3.7.txt b/packages/google-cloud-alloydb/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/packages/google-cloud-alloydb/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-alloydb/testing/constraints-3.8.txt b/packages/google-cloud-alloydb/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-alloydb/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
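+# (Constraints files like these are passed to pip with -c/--constraint, e.g.
+# "pip install -e . -c testing/constraints-3.7.txt"; the pinned 3.7 file is
+# what actually verifies the lower bounds declared in setup.py.)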
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-alloydb/testing/constraints-3.9.txt b/packages/google-cloud-alloydb/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-alloydb/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-alloydb/tests/__init__.py b/packages/google-cloud-alloydb/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/tests/unit/__init__.py b/packages/google-cloud-alloydb/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/__init__.py b/packages/google-cloud-alloydb/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/__init__.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py new file mode 100644 index 000000000000..70d92e6a23b4 --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -0,0 +1,14683 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.alloydb_v1.services.alloy_db_admin import ( + AlloyDBAdminAsyncClient, + AlloyDBAdminClient, + pagers, + transports, +) +from google.cloud.alloydb_v1.types import resources, service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
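+# (Concretely: patching DEFAULT_ENDPOINT to "foo.googleapis.com" yields the
+# distinct mtls endpoint "foo.mtls.googleapis.com", the substitution covered
+# by test__get_default_mtls_endpoint below.)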
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AlloyDBAdminClient._get_default_mtls_endpoint(None) is None + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert AlloyDBAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AlloyDBAdminClient, "grpc"), + (AlloyDBAdminAsyncClient, "grpc_asyncio"), + (AlloyDBAdminClient, "rest"), + ], +) +def test_alloy_db_admin_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "alloydb.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://alloydb.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AlloyDBAdminGrpcTransport, "grpc"), + (transports.AlloyDBAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AlloyDBAdminRestTransport, "rest"), + ], +) +def test_alloy_db_admin_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AlloyDBAdminClient, "grpc"), + (AlloyDBAdminAsyncClient, "grpc_asyncio"), + (AlloyDBAdminClient, "rest"), + ], +) +def test_alloy_db_admin_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == 
creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "alloydb.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://alloydb.googleapis.com" + ) + + +def test_alloy_db_admin_client_get_transport_class(): + transport = AlloyDBAdminClient.get_transport_class() + available_transports = [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminRestTransport, + ] + assert transport in available_transports + + transport = AlloyDBAdminClient.get_transport_class("grpc") + assert transport == transports.AlloyDBAdminGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), + ], +) +@mock.patch.object( + AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient) +) +@mock.patch.object( + AlloyDBAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AlloyDBAdminAsyncClient), +) +def test_alloy_db_admin_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AlloyDBAdminClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AlloyDBAdminClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc", "true"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc", "false"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", "true"), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient) +) +@mock.patch.object( + AlloyDBAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AlloyDBAdminAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_alloy_db_admin_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
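+ # (Decision table exercised below: a client cert source is available AND
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE == "true" -> DEFAULT_MTLS_ENDPOINT with
+ # that cert source; any other combination -> DEFAULT_ENDPOINT with none.)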
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [AlloyDBAdminClient, AlloyDBAdminAsyncClient]) +@mock.patch.object( + AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient) +) +@mock.patch.object( + AlloyDBAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AlloyDBAdminAsyncClient), +) +def test_alloy_db_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), + ], +) +def test_alloy_db_admin_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AlloyDBAdminClient, + transports.AlloyDBAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", None), + ], +) +def test_alloy_db_admin_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_alloy_db_admin_client_client_options_from_dict(): + with mock.patch( + "google.cloud.alloydb_v1.services.alloy_db_admin.transports.AlloyDBAdminGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AlloyDBAdminClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AlloyDBAdminClient, + transports.AlloyDBAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_alloy_db_admin_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
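+    # The flow exercised here: a credentials_file in ClientOptions makes the
+    # client call google.auth.load_credentials_from_file() and pass the loaded
+    # credentials, not the ADC ones, to grpc_helpers.create_channel().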
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "alloydb.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="alloydb.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListClustersRequest, + dict, + ], +) +def test_list_clusters(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async( + transport: str = "grpc_asyncio", request_type=service.ListClustersRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
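+        # The async transport awaits the stub, so the mock must return an
+        # awaitable; FakeUnaryUnaryCall wraps the response message the way a
+        # real grpc.aio unary-unary call would.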
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_clusters_async_from_dict(): + await test_list_clusters_async(request_type=dict) + + +def test_list_clusters_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = service.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_clusters_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
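+        # (The flattened kwarg is packed into the request message, i.e. the
+        # call above is equivalent to
+        # client.list_clusters(service.ListClustersRequest(parent="parent_value")).)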
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_clusters_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_clusters(
+            service.ListClustersRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListClustersResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_clusters(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_clusters(
+            service.ListClustersRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_clusters_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_clusters(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Cluster) for i in results)
+
+
+def test_list_clusters_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Set the response to a series of pages.
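+        # The pages below hold 3, 0, 1 and 2 clusters, chained by the tokens
+        # "abc" -> "def" -> "ghi" -> "" (an empty token ends iteration), so a
+        # full iteration yields 3 + 0 + 1 + 2 = 6 results across 4 pages.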
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_clusters(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_clusters(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.Cluster) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_clusters(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetClusterRequest,
+        dict,
+    ],
+)
+def test_get_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
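+    # (request_type is parametrized as both the proto class and dict; the
+    # client accepts a plain dict and coerces it into the request message.)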
+ with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + backup_source=resources.BackupSource(backup_uid="backup_uid_value"), + ) + response = client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async( + transport: str = "grpc_asyncio", request_type=service.GetClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + ) + ) + response = await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +@pytest.mark.asyncio +async def test_get_cluster_async_from_dict(): + await test_get_cluster_async(request_type=dict) + + +def test_get_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = resources.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster()) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_cluster(
+            service.GetClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_cluster(
+            service.GetClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.CreateClusterRequest,
+        dict,
+    ],
+)
+def test_create_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_cluster_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        client.create_cluster()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateClusterRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_async(
+    transport: str = "grpc_asyncio", request_type=service.CreateClusterRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
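+    # CreateCluster is a long-running operation: the transport returns an
+    # operations_pb2.Operation and the client wraps it in a future. A sketch of
+    # how a caller would typically consume it (real network call, not part of
+    # this test):
+    #
+    #     operation = await client.create_cluster(request=request)
+    #     cluster = await operation.result()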
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_cluster_async_from_dict(): + await test_create_cluster_async(request_type=dict) + + +def test_create_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+def test_create_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_cluster(
+            service.CreateClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_cluster(
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_cluster(
+            service.CreateClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.UpdateClusterRequest,
+        dict,
+    ],
+)
+def test_update_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
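+        # The Operation name ("operations/spam" here, "operations/op" in the
+        # field-header tests) is arbitrary; the client only needs an Operation
+        # message to wrap in the returned future.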
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + +@pytest.mark.asyncio +async def test_update_cluster_async( + transport: str = "grpc_asyncio", request_type=service.UpdateClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_cluster_async_from_dict(): + await test_update_cluster_async(request_type=dict) + + +def test_update_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateClusterRequest() + + request.cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cluster.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
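+    # For UpdateCluster the routed value is the nested field cluster.name, so
+    # the expected header is "cluster.name=name_value" rather than the flat
+    # "name=name_value" used by the Get/Delete tests.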
+    request = service.UpdateClusterRequest()
+
+    request.cluster.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.update_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "cluster.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_cluster_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_cluster(
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_cluster(
+            service.UpdateClusterRequest(),
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_cluster(
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=service.DeleteClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) + + +def test_delete_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_cluster(
+            service.DeleteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_cluster(
+            service.DeleteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.RestoreClusterRequest,
+        dict,
+    ],
+)
+def test_restore_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.restore_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.RestoreClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_restore_cluster_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
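+    # Calling the method with no arguments should still send a default
+    # (empty) RestoreClusterRequest rather than raising.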
+ with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + client.restore_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + +@pytest.mark.asyncio +async def test_restore_cluster_async( + transport: str = "grpc_asyncio", request_type=service.RestoreClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_cluster_async_from_dict(): + await test_restore_cluster_async(request_type=dict) + + +def test_restore_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestoreClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestoreClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListInstancesRequest, + dict, + ], +) +def test_list_instances(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = service.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_instances_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_instances_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
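+    # Either spelling on its own is fine - a sketch of the two valid forms:
+    #
+    #     client.list_instances(service.ListInstancesRequest(parent="parent_value"))
+    #     client.list_instances(parent="parent_value")
+    #
+    # Only mixing them, as below, must raise ValueError.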
+    with pytest.raises(ValueError):
+        client.list_instances(
+            service.ListInstancesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListInstancesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListInstancesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instances(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instances(
+            service.ListInstancesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_instances_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_instances(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Instance) for i in results)
+
+
+def test_list_instances_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Set the response to a series of pages.
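+        # Each invocation of the mocked stub consumes the next entry in
+        # `side_effect`; the trailing RuntimeError acts as a tripwire that
+        # fails the test if the pager ever fetches more pages than the fixture
+        # defines. Roughly, the loop being exercised (a sketch, not the real
+        # implementation):
+        #
+        #     while True:
+        #         response = stub(request)          # pops one side_effect entry
+        #         yield from response.instances
+        #         if not response.next_page_token:
+        #             break
+        #         request.page_token = response.next_page_token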
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instances(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_instances_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_instances(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.Instance) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_instances_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_instances(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetInstanceRequest,
+        dict,
+    ],
+)
+def test_get_instance(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
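+    # `client.transport.get_instance` is a gRPC multicallable, and Python looks
+    # up `__call__` on the type, not the instance - hence patching the *type*
+    # below, which intercepts the invocation before anything touches a channel:
+    #
+    #     stub_cls = type(client.transport.get_instance)
+    #     with mock.patch.object(stub_cls, "__call__") as call:
+    #         ...   # client.get_instance(...) now lands on the mock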
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + ) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = resources.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateInstanceRequest, + dict, + ], +) +def test_create_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
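+    # "Optional" here is the proto3 rule that every field carries a zero-value
+    # default, so an empty message is still valid on the wire - e.g. (sketch):
+    #
+    #     request = service.CreateInstanceRequest()
+    #     request.parent   # -> "" (the proto3 default), yet still serializable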
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
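+        # The flattened kwargs above get copied into a single request proto, so
+        # the per-field assertions below amount to comparing against (sketch):
+        #
+        #     service.CreateInstanceRequest(
+        #         parent="parent_value",
+        #         instance=resources.Instance(name="name_value"),
+        #         instance_id="instance_id_value",
+        #     )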
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.BatchCreateInstancesRequest, + dict, + ], +) +def test_batch_create_instances(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + client.batch_create_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + +@pytest.mark.asyncio +async def test_batch_create_instances_async( + transport: str = "grpc_asyncio", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_instances_async_from_dict(): + await test_batch_create_instances_async(request_type=dict) + + +def test_batch_create_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_instances_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=service.UpdateInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. 
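+    # The raw stub returns a longrunning `operations_pb2.Operation`; the client
+    # wraps it in an api_core operation future. A hedged usage sketch against a
+    # real backend (not this mock):
+    #
+    #     operation = await client.update_instance(request)
+    #     instance = await operation.result()  # resolves once the LRO finishes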
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +def test_update_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_instance( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.FailoverInstanceRequest, + dict, + ], +) +def test_failover_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. 
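+    # Entries in `call.mock_calls` unpack as (name, args, kwargs): the request
+    # proto rides in positional slot 0 and any metadata in kwargs - sketch:
+    #
+    #     _, args, kwargs = call.mock_calls[0]
+    #     args[0]              # -> the service.FailoverInstanceRequest sent
+    #     kwargs["metadata"]   # -> routing headers and other call metadata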
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_failover_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + client.failover_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + +@pytest.mark.asyncio +async def test_failover_instance_async( + transport: str = "grpc_asyncio", request_type=service.FailoverInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_failover_instance_async_from_dict(): + await test_failover_instance_async(request_type=dict) + + +def test_failover_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.FailoverInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_failover_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.FailoverInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_failover_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.failover_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_failover_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_failover_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.failover_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_failover_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestartInstanceRequest, + dict, + ], +) +def test_restart_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestartInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restart_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + client.restart_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestartInstanceRequest() + + +@pytest.mark.asyncio +async def test_restart_instance_async( + transport: str = "grpc_asyncio", request_type=service.RestartInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestartInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restart_instance_async_from_dict(): + await test_restart_instance_async(request_type=dict) + + +def test_restart_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestartInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.restart_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_restart_instance_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.RestartInstanceRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.restart_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_restart_instance_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.restart_instance(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_restart_instance_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.restart_instance(
+            service.RestartInstanceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_restart_instance_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.restart_instance(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
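+        # (The flattened ``name`` keyword is folded into a
+        # service.RestartInstanceRequest before the stub is invoked, which
+        # is why ``args[0].name`` is inspected below.)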
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restart_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restart_instance( + service.RestartInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=service.ListBackupsRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest() + + # Establish that the response is the type that we expect. 
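+    # The async surface yields a ListBackupsAsyncPager (instead of the
+    # synchronous ListBackupsPager) so callers can ``async for`` over
+    # results.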
+ assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = service.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backups_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
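+    # grpc_helpers_async.FakeUnaryUnaryCall (used below) wraps a plain
+    # response so it can be awaited, standing in for the call object a real
+    # gRPC asyncio stub would return.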
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListBackupsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_backups(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_backups_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_backups(
+            service.ListBackupsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_backups_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListBackupsResponse(
+                backups=[],
+                next_page_token="def",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_backups(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Backup) for i in results)
+
+
+def test_list_backups_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListBackupsResponse(
+                backups=[],
+                next_page_token="def",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_backups(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
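+    # ``new_callable=mock.AsyncMock`` (below) lets the patched ``__call__``
+    # be awaited, handing back the queued side_effect responses in order;
+    # the trailing RuntimeError guards against the pager requesting more
+    # pages than the test supplies.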
+ with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_backups(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest() + + # Establish that the response is the type that we expect. 
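+    # Every scalar set on the mocked Backup above should round-trip
+    # unchanged onto the returned proto-plus object.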
+ assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=service.GetBackupRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        call.return_value = resources.Backup()
+        client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_backup_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetBackupRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup())
+        await client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_backup_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Backup()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_backup(
+            service.GetBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup( + service.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateBackupRequest, + dict, + ], +) +def test_create_backup(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + client.create_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBackupRequest() + + +@pytest.mark.asyncio +async def test_create_backup_async( + transport: str = "grpc_asyncio", request_type=service.CreateBackupRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBackupRequest() + + # Establish that the response is the type that we expect. 
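+    # (CreateBackup is a long-running operation, so the call resolves to an
+    # operation future; the finished Backup comes from its result(), not
+    # from the return value directly.)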
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) + + +def test_create_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup( + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup + mock_val = resources.Backup(name="name_value") + assert arg == mock_val + arg = args[0].backup_id + mock_val = "backup_id_value" + assert arg == mock_val + + +def test_create_backup_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
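+    # (Any one of the three flattened fields supplied below -- parent,
+    # backup or backup_id -- is enough to trigger the ValueError when a
+    # request object is also given.)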
+    with pytest.raises(ValueError):
+        client.create_backup(
+            service.CreateBackupRequest(),
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_backup(
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].backup_id
+        mock_val = "backup_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_backup(
+            service.CreateBackupRequest(),
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.UpdateBackupRequest,
+        dict,
+    ],
+)
+def test_update_backup(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.UpdateBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_backup_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=service.UpdateBackupRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
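+    # For UpdateBackup the routing key is the nested ``backup.name`` field,
+    # so the metadata value checked below is ``backup.name=name_value``
+    # rather than a top-level ``name``.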
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "backup.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_backup_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_backup(
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_backup(
+            service.UpdateBackupRequest(),
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_backup(
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=service.DeleteBackupRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.DeleteBackupRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_backup_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_backup(
+            service.DeleteBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSupportedDatabaseFlagsRequest, + dict, + ], +) +def test_list_supported_database_flags(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_supported_database_flags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSupportedDatabaseFlagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_supported_database_flags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + client.list_supported_database_flags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSupportedDatabaseFlagsRequest() + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async( + transport: str = "grpc_asyncio", + request_type=service.ListSupportedDatabaseFlagsRequest, +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_supported_database_flags(request) + + # Establish that the underlying gRPC stub method was called. 
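+        # (The async test only requires that at least one stub call was
+        # recorded; the synchronous variant above asserts exactly one.)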
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSupportedDatabaseFlagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSupportedDatabaseFlagsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_from_dict(): + await test_list_supported_database_flags_async(request_type=dict) + + +def test_list_supported_database_flags_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSupportedDatabaseFlagsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + call.return_value = service.ListSupportedDatabaseFlagsResponse() + client.list_supported_database_flags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSupportedDatabaseFlagsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSupportedDatabaseFlagsResponse() + ) + await client.list_supported_database_flags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_supported_database_flags_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSupportedDatabaseFlagsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_supported_database_flags( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].parent
+    mock_val = "parent_value"
+    assert arg == mock_val
+
+
+def test_list_supported_database_flags_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_supported_database_flags(
+            service.ListSupportedDatabaseFlagsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListSupportedDatabaseFlagsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_supported_database_flags(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_supported_database_flags(
+            service.ListSupportedDatabaseFlagsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_supported_database_flags_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
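+        # Four pages (3, 0, 1 and 2 flags) with tokens "abc", "def" and
+        # "ghi", then no token: the pager should surface six flags in total
+        # and then stop.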
+ call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_supported_database_flags(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) + + +def test_list_supported_database_flags_pages(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + pages = list(client.list_supported_database_flags(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_supported_database_flags( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in responses) + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_supported_database_flags(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListClustersRequest, + dict, + ], +) +def test_list_clusters_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
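+ # The REST tests fake at the HTTP layer rather than the stub layer: the
+ # proto response is serialized to JSON and wrapped in a requests.Response,
+ # so the transport's real deserialization path still runs.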
+ return_value = service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
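+ # "v1/sample_method" is a placeholder URI, not a real route; the test only
+ # asserts on the query parameters that reach the mocked session below.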
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListClustersResponse.to_json( + service.ListClustersResponse() + ) + + request = service.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=service.ListClustersRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
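+ # api_core maps HTTP error statuses to typed exceptions, so a mocked 400
+ # response should surface as core_exceptions.BadRequest.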
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + service.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
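+ # The four fake pages get queued twice below ("response + response")
+ # because the pager is drained twice: once via list(pager) and once via
+ # .pages.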
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + service.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + backup_source=resources.BackupSource(backup_uid="backup_uid_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cluster(request) + + # Establish that the response is the type that we expect. 
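+ # Every scalar field faked above is asserted individually after the JSON
+ # round trip; the message-typed backup_source field is set but not
+ # re-asserted here.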
+ assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
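+ # GetCluster carries no optional query parameters, so the stubbed
+ # transcode result is a bodiless "get".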
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Cluster.to_json(resources.Cluster()) + + request = service.GetClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Cluster() + + client.get_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=service.GetClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cluster(request) + + +def test_get_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
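+ # Besides the return type, the flattened-call test verifies routing: the
+ # final path_template.validate assertion checks that the request hit the
+ # expected URI template.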
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + args[1], + ) + + +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + service.GetClusterRequest(), + name="name_value", + ) + + +def test_get_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + 
"secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "clusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cluster(request) + + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=service.CreateClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + 
"database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cluster(request) + + +def test_create_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/clusters" % client.transport._host, + args[1], + ) + + +def test_create_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_cluster( + service.CreateClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + +def test_create_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "projects/sample1/locations/sample2/clusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. 
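+ # UpdateCluster returns a long-running operation; only the operation name
+ # is checked, as the test does not poll the operation to completion.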
+ assert response.operation.name == "operations/spam" + + +def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
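+ # UpdateCluster transcodes to a "patch" with a body and has no required
+ # path or query fields beyond the cluster message itself, so only the
+ # "$alt" parameter is expected below.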
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("cluster",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=service.UpdateClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "projects/sample1/locations/sample2/clusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + 
"cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cluster(request) + + +def test_update_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{cluster.name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_update_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
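+ # DeleteCluster transcodes to a bodiless "delete"; its optional etag,
+ # force, request_id and validate_only fields all travel as query
+ # parameters.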
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=service.DeleteClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cluster(request) + + +def test_delete_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*}" % client.transport._host, + args[1], + ) + + +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cluster( + service.DeleteClusterRequest(), + name="name_value", + ) + + +def test_delete_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestoreClusterRequest, + dict, + ], +) +def test_restore_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restore_cluster_rest_required_fields( + request_type=service.RestoreClusterRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
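+ # Unlike CreateCluster, RestoreCluster has no optional query parameters:
+ # the unset-fields assertion below intersects an empty set with the three
+ # required fields.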
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restore_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.RestoreClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_cluster_rest_bad_request( + transport: str = "rest", request_type=service.RestoreClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
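+ # google.api_core translates HTTP status codes into exception classes
+ # (400 -> BadRequest, 404 -> NotFound, and so on), so a mocked Response with
+ # status_code 400 is all it takes for the client call to raise.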
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_cluster(request) + + +def test_restore_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_rest_required_fields(request_type=service.ListInstancesRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse() + # Mock the http request call within the method and fake a response. 
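+ # Recap of the harness above: _get_unset_required_fields() was called once
+ # to confirm that default-valued fields are dropped from the transcoded
+ # request, and again (after repopulating "parent") to confirm explicitly set
+ # values are left alone; only the optional paging params (filter, orderBy,
+ # pageSize, pageToken) may remain as unset query parameters.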
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListInstancesResponse.to_json( + service.ListInstancesResponse() + ) + + request = service.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListInstancesResponse() + + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=service.ListInstancesRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/clusters/*}/instances" + % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
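+ # client.list_instances() returns a ListInstancesPager that re-issues the
+ # HTTP request whenever a page carries a next_page_token, so req.side_effect
+ # is primed below with a whole sequence of page responses instead of a
+ # single return_value.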
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
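+ # resources.Instance is a proto-plus message: .pb() exposed the raw protobuf
+ # for json_format.MessageToJson, and the transport parses that JSON back into
+ # a proto-plus Instance, so every scalar, enum, and bool set above should
+ # survive the round trip intact.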
+ assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +def test_get_instance_rest_required_fields(request_type=service.GetInstanceRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Instance.to_json(resources.Instance()) + + request = service.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=service.GetInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
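+ # Flattened calls assemble the request object from keyword arguments; the
+ # assertions below recover the URI actually sent from req.mock_calls and
+ # check it with path_template.validate() against the GetInstance http rule.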
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
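+ # (The request_init["instance"] dict above mirrors the Instance message
+ # field-for-field; proto-plus coerces the nested dicts such as
+ # machine_config and query_insights_config into their message types when
+ # request_type(**request_init) is constructed.)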
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_required_fields( + request_type=service.CreateInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
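+ # Unlike RestoreCluster, CreateInstance sends instance_id as a *required*
+ # query parameter, so the expected params asserted further down include
+ # ("instanceId", "") -- the camelCase key with its proto3 default value --
+ # alongside the usual ("$alt", "json;enum-encoding=int") entry.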
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request( + transport: str = "rest", request_type=service.CreateInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": 
"ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/clusters/*}/instances" + % client.transport._host, + args[1], + ) + + +def test_create_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.BatchCreateInstancesRequest, + dict, + ], +) +def test_batch_create_instances_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["requests"] = { + "create_instance_requests": [ + { + "parent": "parent_value", + "instance_id": "instance_id_value", + "instance": { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + }, + "request_id": "request_id_value", + "validate_only": True, + } + ] + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_instances(request) + + # Establish that the response is the type that we expect. 
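+ # A single BatchCreateInstancesRequest nests the individual
+ # CreateInstanceRequests under requests.create_instance_requests, and the
+ # service answers with one long-running operation for the whole batch.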
+ assert response.operation.name == "operations/spam" + + +def test_batch_create_instances_rest_required_fields( + request_type=service.BatchCreateInstancesRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_instances_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_instances_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.BatchCreateInstancesRequest.pb( + service.BatchCreateInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.BatchCreateInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_instances_rest_bad_request( + transport: str = "rest", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["requests"] = { + "create_instance_requests": [ + { + "parent": "parent_value", + "instance_id": "instance_id_value", + "instance": { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", 
+ "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + }, + "request_id": "request_id_value", + "validate_only": True, + } + ] + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_instances(request) + + +def test_batch_create_instances_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_required_fields( + request_type=service.UpdateInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("instance",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=service.UpdateInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": 
"zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_required_fields( + request_type=service.DeleteInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=service.DeleteInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
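+    # A 400 status on the mocked session is sufficient: the REST transport maps it to the core_exceptions.BadRequest asserted here.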
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +def test_delete_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.FailoverInstanceRequest, + dict, + ], +) +def test_failover_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.failover_instance(request) + + # Establish that the response is the type that we expect. 
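+    # failover_instance returns a long-running operation, so the check below inspects the name of the wrapped operations_pb2.Operation rather than a resource payload.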
+ assert response.operation.name == "operations/spam" + + +def test_failover_instance_rest_required_fields( + request_type=service.FailoverInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
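+            # Unlike the DELETE case above, :failover transcodes to an HTTP POST, so the stub below also supplies a "body" entry.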
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.failover_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_failover_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.failover_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_failover_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_failover_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.FailoverInstanceRequest.pb( + service.FailoverInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.FailoverInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.failover_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_failover_instance_rest_bad_request( + transport: str = "rest", request_type=service.FailoverInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.failover_instance(request) + + +def test_failover_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.failover_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/instances/*}:failover" + % client.transport._host, + args[1], + ) + + +def test_failover_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +def test_failover_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestartInstanceRequest, + dict, + ], +) +def test_restart_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restart_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restart_instance_rest_required_fields( + request_type=service.RestartInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restart_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restart_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restart_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restart_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restart_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restart_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restart_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.RestartInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restart_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restart_instance_rest_bad_request( + transport: str = "rest", request_type=service.RestartInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restart_instance(request) + + +def test_restart_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restart_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/clusters/*/instances/*}:restart" + % client.transport._host, + args[1], + ) + + +def test_restart_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restart_instance( + service.RestartInstanceRequest(), + name="name_value", + ) + + +def test_restart_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. 
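+    # list_backups wraps the decoded ListBackupsResponse in a pager; the response fields asserted below are proxied through to the underlying message.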
+ assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_rest_required_fields(request_type=service.ListBackupsRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
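+            # list_backups transcodes to an HTTP GET, so the optional filter/order_by/page_size/page_token fields checked above travel as query parameters.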
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListBackupsResponse.to_json( + service.ListBackupsResponse() + ) + + request = service.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=service.ListBackupsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backups" % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
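+        # (The transcode mock stays commented out below, presumably because the pager test can run against the real transcoding path; instead, four pages are stubbed and req.side_effect serves one canned Response per page fetch.)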
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
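+    # get_backup is a plain unary read, so the decoded resources.Backup comes back directly and each scalar field is asserted below.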
+ assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + + +def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Backup.to_json(resources.Backup()) + + request = service.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=service.GetBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
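+    # Flattened-call coverage: keyword arguments stand in for a request object, and the URL actually sent is validated against the method's HTTP rule template.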
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + service.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateBackupRequest, + dict, + ], +) +def test_create_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_backup_rest_required_fields(request_type=service.CreateBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "backupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupId"] = "backup_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
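+            # backup_id is a required query-string parameter, which is why expected_params further below lists "backupId" (still at its empty sentinel value) alongside "$alt".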
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup(request) + + expected_params = [ + ( + "backupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "backupId", + "backup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_rest_bad_request( + transport: str = "rest", request_type=service.CreateBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + 
"kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup(request) + + +def test_create_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backups" % client.transport._host, + args[1], + ) + + +def test_create_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +def test_create_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backups/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_rest_required_fields(request_type=service.UpdateBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("backup",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=service.UpdateBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backups/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup(request) + + +def test_update_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup.name=projects/*/locations/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
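+    # GAPIC methods accept either a populated request object or individual
+    # "flattened" keyword arguments, never both at once.  The two valid call
+    # styles, sketched with placeholder values:
+    #
+    #     client.update_backup(request=service.UpdateBackupRequest())  # ok
+    #     client.update_backup(backup=..., update_mask=...)            # ok
+    #
+    # Mixing them, as below, raises ValueError.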
+ with pytest.raises(ValueError): + client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
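+            # In production, path_template.transcode() splits request fields
+            # between the URI path, the body, and the query string according
+            # to the http rule.  Stubbing it with a fieldless URI and no body
+            # (below) routes every field into query_params, which the test
+            # can then inspect via req.call_args.  Shape of the stub result:
+            #
+            #     {"uri": "v1/sample_method", "method": "delete",
+            #      "query_params": pb_request}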
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=service.DeleteBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
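+    # The REST transport maps HTTP status codes onto google.api_core
+    # exceptions, so faking a 400 is enough to drive the error path with no
+    # real server involved.  The same mapping is available directly, e.g.:
+    #
+    #     exc = core_exceptions.from_http_status(400, "bad request")
+    #     assert isinstance(exc, core_exceptions.BadRequest)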
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup(request) + + +def test_delete_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, + args[1], + ) + + +def test_delete_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", + ) + + +def test_delete_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSupportedDatabaseFlagsRequest, + dict, + ], +) +def test_list_supported_database_flags_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_supported_database_flags(request) + + # Establish that the response is the type that we expect. 
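+    # List methods return a pager rather than the raw response message; the
+    # pager re-issues the request with each next_page_token as it is
+    # iterated.  Typical (sketched) usage:
+    #
+    #     for flag in client.list_supported_database_flags(parent=parent):
+    #         print(flag.name)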
+ assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_supported_database_flags_rest_required_fields( + request_type=service.ListSupportedDatabaseFlagsRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_supported_database_flags(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_supported_database_flags_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_supported_database_flags._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_supported_database_flags_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListSupportedDatabaseFlagsRequest.pb( + service.ListSupportedDatabaseFlagsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListSupportedDatabaseFlagsResponse.to_json( + service.ListSupportedDatabaseFlagsResponse() + ) + + request = service.ListSupportedDatabaseFlagsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListSupportedDatabaseFlagsResponse() + + client.list_supported_database_flags( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_supported_database_flags_rest_bad_request( + transport: str = "rest", request_type=service.ListSupportedDatabaseFlagsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_supported_database_flags(request) + + +def test_list_supported_database_flags_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_supported_database_flags(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/supportedDatabaseFlags" + % client.transport._host, + args[1], + ) + + +def test_list_supported_database_flags_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), + parent="parent_value", + ) + + +def test_list_supported_database_flags_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
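+        # Unlike the single-response tests above, this pager test queues a
+        # sequence of fake pages via `req.side_effect`; each page the pager
+        # fetches consumes the next queued response until a page arrives
+        # with no next_page_token.  The queuing idiom, in sketch form:
+        #
+        #     req.side_effect = [page1_resp, page2_resp, page3_resp]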
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListSupportedDatabaseFlagsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_supported_database_flags(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) + + pages = list(client.list_supported_database_flags(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
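+    # A fully constructed transport already carries its own credentials,
+    # which is why the test above rejects every combination of `transport=`
+    # with client-level credentials, credentials_file, scopes, or api_key.
+    # The one supported pattern is sketched below and exercised next:
+    #
+    #     transport = transports.AlloyDBAdminGrpcTransport(credentials=creds)
+    #     client = AlloyDBAdminClient(transport=transport)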
+ transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AlloyDBAdminClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AlloyDBAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + transports.AlloyDBAdminRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AlloyDBAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AlloyDBAdminGrpcTransport, + ) + + +def test_alloy_db_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AlloyDBAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_alloy_db_admin_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.alloydb_v1.services.alloy_db_admin.transports.AlloyDBAdminTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AlloyDBAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
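+    # AlloyDBAdminTransport is the abstract base that the gRPC and REST
+    # transports implement.  Its stubs follow roughly this pattern (a
+    # sketch, not the actual source):
+    #
+    #     def list_clusters(self, request):
+    #         raise NotImplementedError()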
+ methods = ( + "list_clusters", + "get_cluster", + "create_cluster", + "update_cluster", + "delete_cluster", + "restore_cluster", + "list_instances", + "get_instance", + "create_instance", + "batch_create_instances", + "update_instance", + "delete_instance", + "failover_instance", + "restart_instance", + "list_backups", + "get_backup", + "create_backup", + "update_backup", + "delete_backup", + "list_supported_database_flags", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_alloy_db_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.alloydb_v1.services.alloy_db_admin.transports.AlloyDBAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlloyDBAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_alloy_db_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.alloydb_v1.services.alloy_db_admin.transports.AlloyDBAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlloyDBAdminTransport() + adc.assert_called_once() + + +def test_alloy_db_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AlloyDBAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + ], +) +def test_alloy_db_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
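+    # "ADC" is Application Default Credentials: google.auth.default() probes
+    # the environment (GOOGLE_APPLICATION_CREDENTIALS, gcloud config, the GCE
+    # metadata server) and returns a (credentials, project_id) tuple, which
+    # is why the mock below returns a 2-tuple.  For example:
+    #
+    #     credentials, project_id = google.auth.default(
+    #         scopes=["https://www.googleapis.com/auth/cloud-platform"]
+    #     )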
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + transports.AlloyDBAdminRestTransport, + ], +) +def test_alloy_db_admin_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AlloyDBAdminGrpcTransport, grpc_helpers), + (transports.AlloyDBAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_alloy_db_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "alloydb.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="alloydb.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport], +) +def test_alloy_db_admin_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
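+    # client_cert_source_for_mtls is a zero-argument callback returning a
+    # (cert_bytes, key_bytes) pair that the transport feeds into
+    # grpc.ssl_channel_credentials().  Judging by the assertion below, the
+    # test fixture looks like:
+    #
+    #     def client_cert_source_callback():
+    #         return b"cert bytes", b"key bytes"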
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_alloy_db_admin_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AlloyDBAdminRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_alloy_db_admin_rest_lro_client(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_alloy_db_admin_host_no_port(transport_name): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="alloydb.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "alloydb.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://alloydb.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_alloy_db_admin_host_with_port(transport_name): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="alloydb.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "alloydb.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://alloydb.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_alloy_db_admin_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AlloyDBAdminClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AlloyDBAdminClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_clusters._session + session2 = client2.transport.list_clusters._session + assert session1 != session2 + session1 = client1.transport.get_cluster._session + session2 = client2.transport.get_cluster._session + assert session1 != session2 + session1 = client1.transport.create_cluster._session + session2 = client2.transport.create_cluster._session + assert session1 != session2 + session1 = client1.transport.update_cluster._session + session2 = client2.transport.update_cluster._session + assert session1 != session2 + session1 = client1.transport.delete_cluster._session + session2 = client2.transport.delete_cluster._session + assert session1 != session2 + session1 = 
client1.transport.restore_cluster._session
+    session2 = client2.transport.restore_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.list_instances._session
+    session2 = client2.transport.list_instances._session
+    assert session1 != session2
+    session1 = client1.transport.get_instance._session
+    session2 = client2.transport.get_instance._session
+    assert session1 != session2
+    session1 = client1.transport.create_instance._session
+    session2 = client2.transport.create_instance._session
+    assert session1 != session2
+    session1 = client1.transport.batch_create_instances._session
+    session2 = client2.transport.batch_create_instances._session
+    assert session1 != session2
+    session1 = client1.transport.update_instance._session
+    session2 = client2.transport.update_instance._session
+    assert session1 != session2
+    session1 = client1.transport.delete_instance._session
+    session2 = client2.transport.delete_instance._session
+    assert session1 != session2
+    session1 = client1.transport.failover_instance._session
+    session2 = client2.transport.failover_instance._session
+    assert session1 != session2
+    session1 = client1.transport.restart_instance._session
+    session2 = client2.transport.restart_instance._session
+    assert session1 != session2
+    session1 = client1.transport.list_backups._session
+    session2 = client2.transport.list_backups._session
+    assert session1 != session2
+    session1 = client1.transport.get_backup._session
+    session2 = client2.transport.get_backup._session
+    assert session1 != session2
+    session1 = client1.transport.create_backup._session
+    session2 = client2.transport.create_backup._session
+    assert session1 != session2
+    session1 = client1.transport.update_backup._session
+    session2 = client2.transport.update_backup._session
+    assert session1 != session2
+    session1 = client1.transport.delete_backup._session
+    session2 = client2.transport.delete_backup._session
+    assert session1 != session2
+    session1 = client1.transport.list_supported_database_flags._session
+    session2 = client2.transport.list_supported_database_flags._session
+    assert session1 != session2
+
+
+def test_alloy_db_admin_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AlloyDBAdminGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_alloy_db_admin_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AlloyDBAdminGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
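+#
+# These mTLS tests wrap transport construction in pytest.warns(DeprecationWarning),
+# since emitting the warning is itself part of the contract under test.  The
+# idiom, in miniature:
+#
+#     with pytest.warns(DeprecationWarning):
+#         transport_class(host=host, api_mtls_endpoint=endpoint)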
+@pytest.mark.parametrize( + "transport_class", + [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport], +) +def test_alloy_db_admin_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport], +) +def test_alloy_db_admin_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_alloy_db_admin_grpc_lro_client(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
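+    # The identity check below implies operations_client is created lazily
+    # and then cached on the transport.  A plausible sketch of that pattern
+    # (not the actual implementation):
+    #
+    #     @property
+    #     def operations_client(self):
+    #         if self._operations_client is None:
+    #             self._operations_client = operations_v1.OperationsClient(...)
+    #         return self._operations_client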
+ assert transport.operations_client is transport.operations_client + + +def test_alloy_db_admin_grpc_lro_async_client(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_backup_path(): + project = "squid" + location = "clam" + backup = "whelk" + expected = "projects/{project}/locations/{location}/backups/{backup}".format( + project=project, + location=location, + backup=backup, + ) + actual = AlloyDBAdminClient.backup_path(project, location, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "octopus", + "location": "oyster", + "backup": "nudibranch", + } + path = AlloyDBAdminClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_backup_path(path) + assert expected == actual + + +def test_cluster_path(): + project = "cuttlefish" + location = "mussel" + cluster = "winkle" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + actual = AlloyDBAdminClient.cluster_path(project, location, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "cluster": "abalone", + } + path = AlloyDBAdminClient.cluster_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_cluster_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "squid" + location = "clam" + key_ring = "whelk" + crypto_key = "octopus" + crypto_key_version = "oyster" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + crypto_key_version=crypto_key_version, + ) + actual = AlloyDBAdminClient.crypto_key_version_path( + project, location, key_ring, crypto_key, crypto_key_version + ) + assert expected == actual + + +def test_parse_crypto_key_version_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "key_ring": "mussel", + "crypto_key": "winkle", + "crypto_key_version": "nautilus", + } + path = AlloyDBAdminClient.crypto_key_version_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_crypto_key_version_path(path) + assert expected == actual + + +def test_instance_path(): + project = "scallop" + location = "abalone" + cluster = "squid" + instance = "clam" + expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + actual = AlloyDBAdminClient.instance_path(project, location, cluster, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "instance": "nudibranch", + } + path = AlloyDBAdminClient.instance_path(**expected) + + # Check that the path construction is reversible. 
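+    # Every resource type gets a *_path builder and a parse_*_path inverse;
+    # the parser returns the path's segments as a dict, which is what makes
+    # this round trip possible.  For instance:
+    #
+    #     path = AlloyDBAdminClient.instance_path("p", "l", "c", "i")
+    #     assert AlloyDBAdminClient.parse_instance_path(path) == {
+    #         "project": "p", "location": "l", "cluster": "c", "instance": "i"
+    #     }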
+ actual = AlloyDBAdminClient.parse_instance_path(path) + assert expected == actual + + +def test_network_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = AlloyDBAdminClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = AlloyDBAdminClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_network_path(path) + assert expected == actual + + +def test_supported_database_flag_path(): + project = "scallop" + location = "abalone" + flag = "squid" + expected = "projects/{project}/locations/{location}/flags/{flag}".format( + project=project, + location=location, + flag=flag, + ) + actual = AlloyDBAdminClient.supported_database_flag_path(project, location, flag) + assert expected == actual + + +def test_parse_supported_database_flag_path(): + expected = { + "project": "clam", + "location": "whelk", + "flag": "octopus", + } + path = AlloyDBAdminClient.supported_database_flag_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_supported_database_flag_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AlloyDBAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = AlloyDBAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AlloyDBAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = AlloyDBAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AlloyDBAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = AlloyDBAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = AlloyDBAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = AlloyDBAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AlloyDBAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = AlloyDBAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AlloyDBAdminTransport, "_prep_wrapped_messages" + ) as prep: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AlloyDBAdminTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AlloyDBAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
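+        # CancelOperation returns google.protobuf.Empty on the wire, which
+        # the client surfaces as None; hence the fake body below is the empty
+        # JSON object "{}" and the test asserts `response is None` instead of
+        # checking a message type, e.g.:
+        #
+        #     response = client.cancel_operation(request)   # returns None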
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
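+        # Unlike the cancel/delete tests, GetOperation returns a real payload,
+        # so an Operation proto is serialized with MessageToJson to mimic the
+        # server's response body.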
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
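+    # DeleteOperation carries no payload (google.protobuf.Empty), so the
+    # client is expected to return None.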
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
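+        # Passing the request as a plain dict (rather than a request proto)
+        # exercises the client's coercion of dicts into request messages.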
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
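+    # The client is expected to copy the request's `name` field into the
+    # x-goog-request-params metadata entry so the backend can route the call.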
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
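+    # Async clients await the stub, so the mocked call must return an
+    # awaitable; FakeUnaryUnaryCall below wraps the response accordingly.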
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = AlloyDBAdminClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = AlloyDBAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            "x-goog-request-params",
+            "name=locations/abc",
+        ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = AlloyDBAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = AlloyDBAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
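+        # Exiting the client's context manager should close the transport
+        # exactly once.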
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport), + (AlloyDBAdminAsyncClient, transports.AlloyDBAdminGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/__init__.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py new file mode 100644 index 000000000000..1239487f58bd --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -0,0 +1,17678 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.alloydb_v1alpha.services.alloy_db_admin import ( + AlloyDBAdminAsyncClient, + AlloyDBAdminClient, + pagers, + transports, +) +from google.cloud.alloydb_v1alpha.types import resources, service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
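+# (The mTLS endpoint is normally derived by inserting an "mtls" label into a
+# *.googleapis.com endpoint, which would leave "localhost" unchanged.)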
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert AlloyDBAdminClient._get_default_mtls_endpoint(None) is None
+    assert (
+        AlloyDBAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    )
+    assert (
+        AlloyDBAdminClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        AlloyDBAdminClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        AlloyDBAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert AlloyDBAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (AlloyDBAdminClient, "grpc"),
+        (AlloyDBAdminAsyncClient, "grpc_asyncio"),
+        (AlloyDBAdminClient, "rest"),
+    ],
+)
+def test_alloy_db_admin_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "alloydb.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://alloydb.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.AlloyDBAdminGrpcTransport, "grpc"),
+        (transports.AlloyDBAdminGrpcAsyncIOTransport, "grpc_asyncio"),
+        (transports.AlloyDBAdminRestTransport, "rest"),
+    ],
+)
+def test_alloy_db_admin_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (AlloyDBAdminClient, "grpc"),
+        (AlloyDBAdminAsyncClient, "grpc_asyncio"),
+        (AlloyDBAdminClient, "rest"),
+    ],
+)
+def test_alloy_db_admin_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "alloydb.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://alloydb.googleapis.com"
+        )
+
+
+def test_alloy_db_admin_client_get_transport_class():
+    transport = AlloyDBAdminClient.get_transport_class()
+    available_transports = [
+        transports.AlloyDBAdminGrpcTransport,
+        transports.AlloyDBAdminRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = AlloyDBAdminClient.get_transport_class("grpc")
+    assert transport == transports.AlloyDBAdminGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"),
+        (
+            AlloyDBAdminAsyncClient,
+            transports.AlloyDBAdminGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"),
+    ],
+)
+@mock.patch.object(
+    AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient)
+)
+@mock.patch.object(
+    AlloyDBAdminAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(AlloyDBAdminAsyncClient),
+)
+def test_alloy_db_admin_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(AlloyDBAdminClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(AlloyDBAdminClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
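+    # ("always" forces the mTLS endpoint even when no client certificate is
+    # configured.)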
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc", "true"),
+        (
+            AlloyDBAdminAsyncClient,
+            transports.AlloyDBAdminGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc", "false"),
+        (
+            AlloyDBAdminAsyncClient,
+            transports.AlloyDBAdminGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", "true"),
+        (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient)
+)
+@mock.patch.object(
+    AlloyDBAdminAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(AlloyDBAdminAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_alloy_db_admin_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
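+    # Three certificate sources are exercised below: one passed explicitly via
+    # client options, one discovered through ADC, and none at all.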
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [AlloyDBAdminClient, AlloyDBAdminAsyncClient]) +@mock.patch.object( + AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient) +) +@mock.patch.object( + AlloyDBAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AlloyDBAdminAsyncClient), +) +def test_alloy_db_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), + ], +) +def test_alloy_db_admin_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
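+    # User-supplied scopes should be handed to the transport verbatim rather
+    # than replaced by the service defaults.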
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AlloyDBAdminClient, + transports.AlloyDBAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", None), + ], +) +def test_alloy_db_admin_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_alloy_db_admin_client_client_options_from_dict(): + with mock.patch( + "google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.AlloyDBAdminGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AlloyDBAdminClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AlloyDBAdminClient, + transports.AlloyDBAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_alloy_db_admin_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
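+    # load_credentials_from_file and ADC are both patched below, so no real
+    # "credentials.json" file needs to exist; the channel should be created
+    # with the file-based credentials rather than the ADC ones.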
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "alloydb.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="alloydb.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListClustersRequest, + dict, + ], +) +def test_list_clusters(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async( + transport: str = "grpc_asyncio", request_type=service.ListClustersRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
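+        # FakeUnaryUnaryCall wraps the response so that the mocked stub
+        # method returns an awaitable, mirroring a real grpc.aio unary-unary
+        # call.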
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_clusters_async_from_dict(): + await test_list_clusters_async(request_type=dict) + + +def test_list_clusters_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = service.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_clusters_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_clusters_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_clusters(
+            service.ListClustersRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListClustersResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_clusters(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_clusters(
+            service.ListClustersRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_clusters_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_clusters(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Cluster) for i in results)
+
+
+def test_list_clusters_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Set the response to a series of pages.
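+        # The final response carries an empty next_page_token, so iteration
+        # stops there; the trailing RuntimeError would only be raised if the
+        # pager fetched one page too many.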
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_clusters(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_clusters(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.Cluster) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_clusters(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetClusterRequest,
+        dict,
+    ],
+)
+def test_get_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + backup_source=resources.BackupSource(backup_uid="backup_uid_value"), + ) + response = client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async( + transport: str = "grpc_asyncio", request_type=service.GetClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + ) + ) + response = await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +@pytest.mark.asyncio +async def test_get_cluster_async_from_dict(): + await test_get_cluster_async(request_type=dict) + + +def test_get_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = resources.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster()) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_cluster(
+            service.GetClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_cluster(
+            service.GetClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.CreateClusterRequest,
+        dict,
+    ],
+)
+def test_create_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_cluster_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        client.create_cluster()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateClusterRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_async(
+    transport: str = "grpc_asyncio", request_type=service.CreateClusterRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_cluster_async_from_dict(): + await test_create_cluster_async(request_type=dict) + + +def test_create_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
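+        # Each flattened keyword argument should have been copied into the
+        # corresponding field of the CreateClusterRequest sent to the stub.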
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+def test_create_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_cluster(
+            service.CreateClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_cluster(
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_cluster(
+            service.CreateClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.UpdateClusterRequest,
+        dict,
+    ],
+)
+def test_update_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + +@pytest.mark.asyncio +async def test_update_cluster_async( + transport: str = "grpc_asyncio", request_type=service.UpdateClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_cluster_async_from_dict(): + await test_update_cluster_async(request_type=dict) + + +def test_update_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateClusterRequest() + + request.cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cluster.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
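+    # For UpdateCluster the routing key is the nested "cluster.name" field,
+    # so the expected x-goog-request-params entry is "cluster.name=name_value".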
+    request = service.UpdateClusterRequest()
+
+    request.cluster.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.update_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "cluster.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_cluster_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_cluster(
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_cluster(
+            service.UpdateClusterRequest(),
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_cluster(
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=service.DeleteClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) + + +def test_delete_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_cluster(
+            service.DeleteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_cluster(
+            service.DeleteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.PromoteClusterRequest,
+        dict,
+    ],
+)
+def test_promote_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.promote_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.PromoteClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_promote_cluster_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + client.promote_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.PromoteClusterRequest() + + +@pytest.mark.asyncio +async def test_promote_cluster_async( + transport: str = "grpc_asyncio", request_type=service.PromoteClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.promote_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.PromoteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_promote_cluster_async_from_dict(): + await test_promote_cluster_async(request_type=dict) + + +def test_promote_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.PromoteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.promote_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_promote_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.PromoteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.promote_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_promote_cluster_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.promote_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_promote_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.promote_cluster(
+            service.PromoteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_promote_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.promote_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_promote_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.promote_cluster(
+            service.PromoteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.RestoreClusterRequest,
+        dict,
+    ],
+)
+def test_restore_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.restore_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + client.restore_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + +@pytest.mark.asyncio +async def test_restore_cluster_async( + transport: str = "grpc_asyncio", request_type=service.RestoreClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_cluster_async_from_dict(): + await test_restore_cluster_async(request_type=dict) + + +def test_restore_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestoreClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestoreClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
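+    # The async client expects the stub to return an awaitable, so the
+    # Operation is wrapped in grpc_helpers_async.FakeUnaryUnaryCall, which
+    # resolves to the wrapped message when awaited.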
+ with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryClusterRequest, + dict, + ], +) +def test_create_secondary_cluster(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_secondary_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + client.create_secondary_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest() + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest() + + # Establish that the response is the type that we expect. 
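+        # Long-running methods surface an operation future wrapping the
+        # Operation proto rather than returning the proto directly.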
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_async_from_dict(): + await test_create_secondary_cluster_async(request_type=dict) + + +def test_create_secondary_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_secondary_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_secondary_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
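+        # Each flattened keyword argument should have been copied into the
+        # matching field of the request proto that reached the stub.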
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +def test_create_secondary_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_secondary_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListInstancesRequest, + dict, + ], +) +def test_list_instances(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. 
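+        # Populate the scalar and repeated response fields so the pager
+        # attributes can be asserted against known values below.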
+ call.return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = service.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_instances_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_instances_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
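+    # The two ways of specifying a request are mutually exclusive, so the
+    # client raises ValueError before any RPC is attempted.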
+ with pytest.raises(ValueError): + await client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_pager(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Instance) for i in results) + + +def test_list_instances_pages(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
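+        # side_effect yields one response per stub invocation, so the pager
+        # keeps fetching while next_page_token is non-empty; the trailing
+        # RuntimeError would surface if iteration over-fetched past the
+        # final page.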
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_instances(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetInstanceRequest, + dict, + ], +) +def test_get_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + # Establish that the response is the type that we expect. 
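+        # A unary response comes back as the resource proto itself, with
+        # each field echoing the value designated on the mock above.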
+ assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + ) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
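+    # Fields transcoded into the URI path (here ``name``) are mirrored into
+    # the x-goog-request-params metadata so the backend can route the
+    # request.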
+ request = service.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = resources.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateInstanceRequest, + dict, + ], +) +def test_create_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryInstanceRequest, + dict, + ], +) +def test_create_secondary_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_secondary_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
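+    # Invoking the method with no arguments should still construct and send
+    # a default CreateSecondaryInstanceRequest to the stub.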
+ with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + client.create_secondary_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_secondary_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_secondary_instance_async_from_dict(): + await test_create_secondary_instance_async(request_type=dict) + + +def test_create_secondary_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_secondary_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_secondary_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_secondary_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.BatchCreateInstancesRequest, + dict, + ], +) +def test_batch_create_instances(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + client.batch_create_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + +@pytest.mark.asyncio +async def test_batch_create_instances_async( + transport: str = "grpc_asyncio", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_instances_async_from_dict(): + await test_batch_create_instances_async(request_type=dict) + + +def test_batch_create_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_instances_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=service.UpdateInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
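+    # Update methods derive their routing value from the nested resource
+    # name, so the header value uses a dotted field path
+    # (``instance.name=...``).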
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "instance.name=name_value",
+ ) in kw["metadata"]
+
+
+def test_update_instance_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_instance(
+ instance=resources.Instance(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].instance
+ mock_val = resources.Instance(name="name_value")
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+ assert arg == mock_val
+
+
+def test_update_instance_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_instance(
+ service.UpdateInstanceRequest(),
+ instance=resources.Instance(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_instance(
+ instance=resources.Instance(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].instance
+ mock_val = resources.Instance(name="name_value")
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_error_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_field_headers_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.DeleteInstanceRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+ await client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_delete_instance_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_instance(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_delete_instance_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_instance(
+ service.DeleteInstanceRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_instance(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.FailoverInstanceRequest, + dict, + ], +) +def test_failover_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_failover_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + client.failover_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + +@pytest.mark.asyncio +async def test_failover_instance_async( + transport: str = "grpc_asyncio", request_type=service.FailoverInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. 
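+ # The assertion below checks for google.api_core.future.Future rather
+ # than the raw operations_pb2.Operation: GAPIC clients wrap long-running
+ # responses in operation.Operation (sync) or operation_async.AsyncOperation
+ # (async), both of which implement that interface. In real usage one
+ # would resolve it, roughly (illustrative only):
+ #
+ #     lro = await client.failover_instance(request)
+ #     instance = await lro.result()  # waits for the server-side LRO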
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_failover_instance_async_from_dict(): + await test_failover_instance_async(request_type=dict) + + +def test_failover_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.FailoverInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_failover_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.FailoverInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_failover_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.failover_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_failover_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_failover_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.failover_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.failover_instance(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_failover_instance_flattened_error_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.failover_instance(
+ service.FailoverInstanceRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ service.RestartInstanceRequest,
+ dict,
+ ],
+)
+def test_restart_instance(request_type, transport: str = "grpc"):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+ response = client.restart_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.RestartInstanceRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_restart_instance_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+ client.restart_instance()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.RestartInstanceRequest()
+
+
+@pytest.mark.asyncio
+async def test_restart_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.RestartInstanceRequest
+):
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestartInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restart_instance_async_from_dict(): + await test_restart_instance_async(request_type=dict) + + +def test_restart_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestartInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restart_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestartInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_restart_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restart_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
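+ # The flattened keyword arguments above are folded into a single
+ # RestartInstanceRequest before the transport is invoked, which is why
+ # the assertions below inspect args[0] (the assembled request) rather
+ # than the keywords themselves. Roughly:
+ #
+ #     client.restart_instance(name="name_value")
+ #     # behaves like:
+ #     client.restart_instance(
+ #         service.RestartInstanceRequest(name="name_value"))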
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_restart_instance_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.restart_instance(
+ service.RestartInstanceRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_restart_instance_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.restart_instance(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_restart_instance_flattened_error_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.restart_instance(
+ service.RestartInstanceRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ service.ListBackupsRequest,
+ dict,
+ ],
+)
+def test_list_backups(request_type, transport: str = "grpc"):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListBackupsResponse(
+ next_page_token="next_page_token_value",
+ unreachable=["unreachable_value"],
+ )
+ response = client.list_backups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ListBackupsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListBackupsPager)
+ assert response.next_page_token == "next_page_token_value"
+ assert response.unreachable == ["unreachable_value"]
+
+
+def test_list_backups_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=service.ListBackupsRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = service.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
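+ # The x-goog-request-params entry asserted below is derived from the
+ # URI-bound request fields via gapic_v1.routing_header (used directly in
+ # the pager tests further down). A minimal sketch of the expected tuple:
+ #
+ #     gapic_v1.routing_header.to_grpc_metadata((("parent", "parent_value"),))
+ #     # == ("x-goog-request-params", "parent=parent_value")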
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_list_backups_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListBackupsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_backups(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+def test_list_backups_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_backups(
+ service.ListBackupsRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_backups_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ service.ListBackupsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_backups(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_backups_flattened_error_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_backups(
+ service.ListBackupsRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_backups_pager(transport_name: str = "grpc"):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ resources.Backup(),
+ resources.Backup(),
+ ],
+ next_page_token="abc",
+ ),
+ service.ListBackupsResponse(
+ backups=[],
+ next_page_token="def",
+ ),
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ ],
+ next_page_token="ghi",
+ ),
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ resources.Backup(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_backups(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, resources.Backup) for i in results)
+
+
+def test_list_backups_pages(transport_name: str = "grpc"):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ resources.Backup(),
+ resources.Backup(),
+ ],
+ next_page_token="abc",
+ ),
+ service.ListBackupsResponse(
+ backups=[],
+ next_page_token="def",
+ ),
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ ],
+ next_page_token="ghi",
+ ),
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ resources.Backup(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_backups(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_pager():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ resources.Backup(),
+ resources.Backup(),
+ ],
+ next_page_token="abc",
+ ),
+ service.ListBackupsResponse(
+ backups=[],
+ next_page_token="def",
+ ),
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ ],
+ next_page_token="ghi",
+ ),
+ service.ListBackupsResponse(
+ backups=[
+ resources.Backup(),
+ resources.Backup(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_backups(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, resources.Backup) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_pages():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
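+ # Assigning an iterable to call.side_effect makes the mocked stub return
+ # the next element on each invocation, simulating successive pages; the
+ # trailing RuntimeError would be raised if the pager ever requested a
+ # page beyond the final empty next_page_token.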
+ call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_backups(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=service.GetBackupRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
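+ # Proto3 gives every scalar field a zero-value default, so an empty
+ # request message is structurally valid at the runtime level; e.g.
+ # service.GetBackupRequest().name == "". Server-side validation never
+ # enters the picture here because the stub is mocked.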
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = resources.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_get_backup_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Backup()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_backup(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_get_backup_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_backup(
+ service.GetBackupRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_backup(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_error_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_backup(
+ service.GetBackupRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ service.CreateBackupRequest,
+ dict,
+ ],
+)
+def test_create_backup(request_type, transport: str = "grpc"):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+ response = client.create_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.CreateBackupRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future) + + +def test_create_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + client.create_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBackupRequest() + + +@pytest.mark.asyncio +async def test_create_backup_async( + transport: str = "grpc_asyncio", request_type=service.CreateBackupRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) + + +def test_create_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. 
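+ # Note the async tests assert only that mock_calls is non-empty, a
+ # looser check than the sync tests' `== 1`, tolerating any extra
+ # bookkeeping entries the awaited fake call may record on the mock.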
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_create_backup_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_backup(
+ parent="parent_value",
+ backup=resources.Backup(name="name_value"),
+ backup_id="backup_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].backup
+ mock_val = resources.Backup(name="name_value")
+ assert arg == mock_val
+ arg = args[0].backup_id
+ mock_val = "backup_id_value"
+ assert arg == mock_val
+
+
+def test_create_backup_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_backup(
+ service.CreateBackupRequest(),
+ parent="parent_value",
+ backup=resources.Backup(name="name_value"),
+ backup_id="backup_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_backup(
+ parent="parent_value",
+ backup=resources.Backup(name="name_value"),
+ backup_id="backup_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].backup
+ mock_val = resources.Backup(name="name_value")
+ assert arg == mock_val
+ arg = args[0].backup_id
+ mock_val = "backup_id_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_error_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateBackupRequest, + dict, + ], +) +def test_update_backup(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=service.UpdateBackupRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = resources.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
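+        # (grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an
+        # awaitable, mimicking a real async unary-unary call.)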
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_backup(
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_backup(
+            service.UpdateBackupRequest(),
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.DeleteBackupRequest,
+        dict,
+    ],
+)
+def test_delete_backup(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.DeleteBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_backup_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        client.delete_backup()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.DeleteBackupRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_async(
+    transport: str = "grpc_asyncio", request_type=service.DeleteBackupRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
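+        # (Flattened keyword arguments are coerced into the request message,
+        # so the assertions read each field back off args[0].)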
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_backup(
+            service.DeleteBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_backup(
+            service.DeleteBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.ListSupportedDatabaseFlagsRequest,
+        dict,
+    ],
+)
+def test_list_supported_database_flags(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListSupportedDatabaseFlagsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_supported_database_flags(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListSupportedDatabaseFlagsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_supported_database_flags_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + client.list_supported_database_flags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSupportedDatabaseFlagsRequest() + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async( + transport: str = "grpc_asyncio", + request_type=service.ListSupportedDatabaseFlagsRequest, +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_supported_database_flags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSupportedDatabaseFlagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSupportedDatabaseFlagsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_from_dict(): + await test_list_supported_database_flags_async(request_type=dict) + + +def test_list_supported_database_flags_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSupportedDatabaseFlagsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + call.return_value = service.ListSupportedDatabaseFlagsResponse() + client.list_supported_database_flags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSupportedDatabaseFlagsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSupportedDatabaseFlagsResponse() + ) + await client.list_supported_database_flags(request) + + # Establish that the underlying gRPC stub method was called. 
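+        # (A weaker check than the sync tests' `== 1`: only that the stub
+        # was invoked at least once.)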
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_supported_database_flags_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListSupportedDatabaseFlagsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_supported_database_flags(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_supported_database_flags_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_supported_database_flags(
+            service.ListSupportedDatabaseFlagsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListSupportedDatabaseFlagsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_supported_database_flags(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_supported_database_flags(
+            service.ListSupportedDatabaseFlagsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_supported_database_flags_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
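+        # (side_effect yields one response per RPC invocation; the trailing
+        # RuntimeError guards against the pager requesting more pages than
+        # the test supplies.)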
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_supported_database_flags(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results)
+
+
+def test_list_supported_database_flags_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_supported_database_flags(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
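+        # (Same page series as above; the async pager is awaited once, then
+        # consumed with `async for`.)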
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_supported_database_flags(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_supported_database_flags(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GenerateClientCertificateRequest,
+        dict,
+    ],
+)
+def test_generate_client_certificate(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_client_certificate), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.GenerateClientCertificateResponse(
+            pem_certificate="pem_certificate_value",
+            pem_certificate_chain=["pem_certificate_chain_value"],
+        )
+        response = client.generate_client_certificate(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + + +def test_generate_client_certificate_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + client.generate_client_certificate() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest() + + +@pytest.mark.asyncio +async def test_generate_client_certificate_async( + transport: str = "grpc_asyncio", + request_type=service.GenerateClientCertificateRequest, +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", + pem_certificate_chain=["pem_certificate_chain_value"], + ) + ) + response = await client.generate_client_certificate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + + +@pytest.mark.asyncio +async def test_generate_client_certificate_async_from_dict(): + await test_generate_client_certificate_async(request_type=dict) + + +def test_generate_client_certificate_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GenerateClientCertificateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + call.return_value = service.GenerateClientCertificateResponse() + client.generate_client_certificate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
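+    # (mock_calls entries unpack as (name, args, kwargs); the routing header
+    # must appear among the metadata tuples handed to the stub.)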
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_generate_client_certificate_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GenerateClientCertificateRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_client_certificate), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.GenerateClientCertificateResponse()
+        )
+        await client.generate_client_certificate(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_generate_client_certificate_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_client_certificate), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.GenerateClientCertificateResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.generate_client_certificate(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_generate_client_certificate_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.generate_client_certificate(
+            service.GenerateClientCertificateRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_generate_client_certificate_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_client_certificate), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.GenerateClientCertificateResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.generate_client_certificate(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_client_certificate_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_client_certificate( + service.GenerateClientCertificateRequest(), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetConnectionInfoRequest, + dict, + ], +) +def test_get_connection_info(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + ) + response = client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConnectionInfoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + + +def test_get_connection_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + client.get_connection_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConnectionInfoRequest() + + +@pytest.mark.asyncio +async def test_get_connection_info_async( + transport: str = "grpc_asyncio", request_type=service.GetConnectionInfoRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + ) + ) + response = await client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConnectionInfoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + + +@pytest.mark.asyncio +async def test_get_connection_info_async_from_dict(): + await test_get_connection_info_async(request_type=dict) + + +def test_get_connection_info_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetConnectionInfoRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + call.return_value = resources.ConnectionInfo() + client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_connection_info_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetConnectionInfoRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConnectionInfo() + ) + await client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_get_connection_info_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConnectionInfo() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.get_connection_info(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_get_connection_info_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_connection_info(
+            service.GetConnectionInfoRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_connection_info_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_connection_info), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            resources.ConnectionInfo()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_connection_info(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_connection_info_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_connection_info(
+            service.GetConnectionInfoRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.ListClustersRequest,
+        dict,
+    ],
+)
+def test_list_clusters_rest(request_type):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListClustersResponse(
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = service.ListClustersResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_clusters(request)
+
+    # Establish that the response is the type that we expect.
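+    # (Even over REST, list responses are wrapped in a pager that lazily
+    # fetches subsequent pages.)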
+ assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
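+            # (path_template.transcode is stubbed out so required-field
+            # handling can be exercised without real http_options matching.)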
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.ListClustersResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_clusters(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_clusters_rest_unset_required_fields():
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_clusters._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "orderBy",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_clusters_rest_interceptors(null_interceptor):
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AlloyDBAdminRestInterceptor(),
+    )
+    client = AlloyDBAdminClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "post_list_clusters"
+    ) as post, mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "pre_list_clusters"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.ListClustersRequest.pb(service.ListClustersRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.ListClustersResponse.to_json(
+            service.ListClustersResponse()
+        )
+
+        request = service.ListClustersRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListClustersResponse()
+
+        client.list_clusters(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_clusters_rest_bad_request(
+    transport: str = "rest", request_type=service.ListClustersRequest
+):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
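+    # (google-api-core maps an HTTP 400 response to
+    # core_exceptions.BadRequest, which pytest.raises expects below.)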
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_clusters(request)
+
+
+def test_list_clusters_rest_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListClustersResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = service.ListClustersResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.list_clusters(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1alpha/{parent=projects/*/locations/*}/clusters"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_clusters_rest_flattened_error(transport: str = "rest"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_clusters(
+            service.ListClustersRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_clusters_rest_pager(transport: str = "rest"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
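+        # (The assertions below iterate the pager twice, so each page is
+        # queued twice on req.side_effect.)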
+        # Set the response as a series of pages
+        response = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(service.ListClustersResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode("UTF-8")
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        pager = client.list_clusters(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Cluster) for i in results)
+
+        pages = list(client.list_clusters(request=sample_request).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetClusterRequest,
+        dict,
+    ],
+)
+def test_get_cluster_rest(request_type):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = resources.Cluster(
+            name="name_value",
+            display_name="display_name_value",
+            uid="uid_value",
+            state=resources.Cluster.State.READY,
+            cluster_type=resources.Cluster.ClusterType.PRIMARY,
+            database_version=resources.DatabaseVersion.POSTGRES_13,
+            network="network_value",
+            etag="etag_value",
+            reconciling=True,
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value"),
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = resources.Cluster.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_cluster(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = resources.Cluster.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_cluster(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_cluster_rest_unset_required_fields():
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_cluster._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_cluster_rest_interceptors(null_interceptor):
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AlloyDBAdminRestInterceptor(),
+    )
+    client = AlloyDBAdminClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "post_get_cluster"
+    ) as post, mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "pre_get_cluster"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.GetClusterRequest.pb(service.GetClusterRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = resources.Cluster.to_json(resources.Cluster())
+
+        request = service.GetClusterRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = resources.Cluster()
+
+        client.get_cluster(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_cluster_rest_bad_request(
+    transport: str = "rest", request_type=service.GetClusterRequest
+):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_cluster(request)
+
+
+def test_get_cluster_rest_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
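+    # (The flattened call is validated by matching the issued URL against
+    # the v1alpha URI template via path_template.validate.)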
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + service.GetClusterRequest(), + name="name_value", + ) + + +def test_get_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, 
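+ # Enum-typed fields in this JSON-style dict are written as wire
+ # integers (e.g. the "schedule" entry below, and "state" and
+ # "cluster_type" above); they parse back into the proper enum members
+ # when the request object is constructed.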
+ "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "clusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.create_cluster(request)
+
+ expected_params = [
+ (
+ "clusterId",
+ "",
+ ),
+ ("$alt", "json;enum-encoding=int"),
+ ]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_create_cluster_rest_unset_required_fields():
+ transport = transports.AlloyDBAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials
+ )
+
+ unset_fields = transport.create_cluster._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(
+ (
+ "clusterId",
+ "requestId",
+ "validateOnly",
+ )
+ )
+ & set(
+ (
+ "parent",
+ "clusterId",
+ "cluster",
+ )
+ )
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_cluster_rest_interceptors(null_interceptor):
+ transport = transports.AlloyDBAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.AlloyDBAdminRestInterceptor(),
+ )
+ client = AlloyDBAdminClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.AlloyDBAdminRestInterceptor, "post_create_cluster"
+ ) as post, mock.patch.object(
+ transports.AlloyDBAdminRestInterceptor, "pre_create_cluster"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = json_format.MessageToJson(
+ operations_pb2.Operation()
+ )
+
+ request = service.CreateClusterRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+
+ client.create_cluster(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_create_cluster_rest_bad_request(
+ transport: str = "rest", request_type=service.CreateClusterRequest
+):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"parent": "projects/sample1/locations/sample2"}
+ request_init["cluster"] = {
+ "backup_source": {
+ "backup_uid": "backup_uid_value",
+ "backup_name": "backup_name_value",
+ },
+ "migration_source": {
+ "host_port": "host_port_value",
+ "reference_id": "reference_id_value",
+ "source_type": 1,
+ },
+ "name": "name_value",
+ "display_name": "display_name_value",
+ "uid": "uid_value",
+ "create_time": {"seconds": 751, "nanos": 543},
+ "update_time": {},
+ "delete_time": {},
+ "labels": {},
+ "state": 1,
+ "cluster_type": 1,
+ "database_version": 1,
+ "network": "network_value",
+ "etag": "etag_value",
+ "annotations": {},
+ "reconciling": True,
+ "initial_user": {"user": "user_value", "password": "password_value"},
+ "automated_backup_policy": {
+ "weekly_schedule": {
+ "start_times": [
+ {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543}
+ ],
+ "days_of_week": [1],
+ },
+ "time_based_retention": {
+ "retention_period": {"seconds": 751, "nanos": 543}
+ },
+ "quantity_based_retention": {"count": 553},
+ "enabled": True,
+ "backup_window": {},
+ "encryption_config": {"kms_key_name": "kms_key_name_value"},
+ "location": "location_value",
+ "labels": {},
+ },
+ "ssl_config": {"ssl_mode": 1, "ca_source": 1},
+ "encryption_config": {},
+ "encryption_info": {
+ "encryption_type": 1,
+ "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"],
+ },
+ "continuous_backup_config": {
+ "enabled": True,
+ "recovery_window_days": 2166,
+ "encryption_config": {},
+ },
+ "continuous_backup_info": {
+ "encryption_info": {},
+ "enabled_time": {},
+ "schedule": [1],
+ },
+ "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"},
+ "primary_config": {
+ "secondary_cluster_names": [
+ "secondary_cluster_names_value1",
+ "secondary_cluster_names_value2",
+ ]
+ },
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.create_cluster(request)
+
+
+def test_create_cluster_rest_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam")
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent="parent_value",
+ cluster=resources.Cluster(
+ backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+ ),
+ cluster_id="cluster_id_value",
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ client.create_cluster(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1alpha/{parent=projects/*/locations/*}/clusters"
+ % client.transport._host,
+ args[1],
+ )
+
+
+def test_create_cluster_rest_flattened_error(transport: str = "rest"):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + client.create_cluster( + service.CreateClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + +def test_create_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "projects/sample1/locations/sample2/clusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. 
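+ # update_cluster returns a long-running operation wrapper; real callers
+ # would typically block on it with something like (sketch):
+ #
+ #   updated_cluster = response.result(timeout=300)
+ #
+ # Here the Operation is mocked, so only its name is verified.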
+ assert response.operation.name == "operations/spam" + + +def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
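+ # UpdateCluster declares no required path or query fields of its own
+ # (request_init above is empty): routing comes from cluster.name, and
+ # optional knobs such as the field mask ride the query string, roughly
+ # (sketch):
+ #
+ #   PATCH /v1alpha/{cluster.name=...}?updateMask=display_name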
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "patch",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.update_cluster(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_update_cluster_rest_unset_required_fields():
+ transport = transports.AlloyDBAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials
+ )
+
+ unset_fields = transport.update_cluster._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(
+ (
+ "allowMissing",
+ "requestId",
+ "updateMask",
+ "validateOnly",
+ )
+ )
+ & set(("cluster",))
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_cluster_rest_interceptors(null_interceptor):
+ transport = transports.AlloyDBAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.AlloyDBAdminRestInterceptor(),
+ )
+ client = AlloyDBAdminClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.AlloyDBAdminRestInterceptor, "post_update_cluster"
+ ) as post, mock.patch.object(
+ transports.AlloyDBAdminRestInterceptor, "pre_update_cluster"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = json_format.MessageToJson(
+ operations_pb2.Operation()
+ )
+
+ request = service.UpdateClusterRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+
+ client.update_cluster(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_update_cluster_rest_bad_request(
+ transport: str = "rest", request_type=service.UpdateClusterRequest
+):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"}
+ }
+ request_init["cluster"] = {
+ "backup_source": {
+ "backup_uid": "backup_uid_value",
+ "backup_name": "backup_name_value",
+ },
+ "migration_source": {
+ "host_port": "host_port_value",
+ "reference_id": "reference_id_value",
+ "source_type": 1,
+ },
+ "name": "projects/sample1/locations/sample2/clusters/sample3",
+ "display_name": "display_name_value",
+ "uid": "uid_value",
+ "create_time": {"seconds": 751, "nanos": 543},
+ "update_time": {},
+ "delete_time": {},
+ "labels": {},
+ "state": 1,
+ "cluster_type": 1,
+ "database_version": 1,
+ "network": "network_value",
+ "etag": "etag_value",
+ "annotations": {},
+ "reconciling": True,
+ "initial_user": {"user": "user_value", "password": "password_value"},
+ "automated_backup_policy": {
+ "weekly_schedule": {
+ "start_times": [
+ {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543}
+ ],
+ "days_of_week": [1],
+ },
+ "time_based_retention": {
+ "retention_period": {"seconds": 751, "nanos": 543}
+ },
+ "quantity_based_retention": {"count": 553},
+ "enabled": True,
+ "backup_window": {},
+ "encryption_config": {"kms_key_name": "kms_key_name_value"},
+ "location": "location_value",
+ "labels": {},
+ },
+ "ssl_config": {"ssl_mode": 1, "ca_source": 1},
+ "encryption_config": {},
+ "encryption_info": {
+ "encryption_type": 1,
+ "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"],
+ },
+ "continuous_backup_config": {
+ "enabled": True,
+ "recovery_window_days": 2166,
+ "encryption_config": {},
+ },
+ "continuous_backup_info": {
+ "encryption_info": {},
+ "enabled_time": {},
+ "schedule": [1],
+ },
+ "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"},
+ "primary_config": {
+ "secondary_cluster_names": [
+ "secondary_cluster_names_value1",
+ "secondary_cluster_names_value2",
+ ]
+ },
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.update_cluster(request)
+
+
+def test_update_cluster_rest_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam")
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {
+ "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"}
+ }
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ cluster=resources.Cluster(
+ backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+ ),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ client.update_cluster(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1alpha/{cluster.name=projects/*/locations/*/clusters/*}"
+ % client.transport._host,
+ args[1],
+ )
+
+
+def test_update_cluster_rest_flattened_error(transport: str = "rest"):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=service.DeleteClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
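+ # An HTTP 400 is translated into core_exceptions.BadRequest by
+ # google.api_core's status-code mapping, roughly (sketch):
+ #
+ #   raise core_exceptions.from_http_response(response)
+ #
+ # so pytest.raises only needs to watch for the mapped exception type.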
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cluster(request) + + +def test_delete_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cluster( + service.DeleteClusterRequest(), + name="name_value", + ) + + +def test_delete_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.PromoteClusterRequest, + dict, + ], +) +def test_promote_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.promote_cluster(request) + + # Establish that the response is the type that we expect. 
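+ # PromoteCluster is a custom method in the AIP-136 style: it POSTs to
+ # the resource name with a ":promote" verb suffix (see the path
+ # assertion in the flattened test below) and returns a long-running
+ # Operation.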
+ assert response.operation.name == "operations/spam" + + +def test_promote_cluster_rest_required_fields( + request_type=service.PromoteClusterRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).promote_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).promote_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.promote_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_promote_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.promote_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_promote_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.PromoteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.promote_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_promote_cluster_rest_bad_request( + transport: str = "rest", request_type=service.PromoteClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.promote_cluster(request) + + +def test_promote_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.promote_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}:promote" + % client.transport._host, + args[1], + ) + + +def test_promote_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.promote_cluster( + service.PromoteClusterRequest(), + name="name_value", + ) + + +def test_promote_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestoreClusterRequest, + dict, + ], +) +def test_restore_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_cluster(request) + + # Establish that the response is the type that we expect. 
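+ # RestoreCluster creates a new cluster under `parent` from a backup or
+ # point-in-time source, so the request targets the cluster collection
+ # rather than an existing cluster resource.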
+ assert response.operation.name == "operations/spam" + + +def test_restore_cluster_rest_required_fields( + request_type=service.RestoreClusterRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
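+ # Unlike CreateCluster, RestoreCluster carries cluster_id in the POST
+ # body rather than the query string, so expected_params below contains
+ # only the "$alt" entry and no "clusterId" pair.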
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restore_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.RestoreClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_cluster_rest_bad_request( + transport: str = "rest", request_type=service.RestoreClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_cluster(request) + + +def test_restore_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryClusterRequest, + dict, + ], +) +def test_create_secondary_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_secondary_cluster(request) + + # Establish that the response is the type that we expect. 
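+ # A secondary cluster points back at its primary via
+ # secondary_config.primary_cluster_name (populated in request_init
+ # above); otherwise creation follows the same long-running-operation
+ # flow as CreateCluster.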
+ assert response.operation.name == "operations/spam" + + +def test_create_secondary_cluster_rest_required_fields( + request_type=service.CreateSecondaryClusterRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "clusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_secondary_cluster(request) + + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_secondary_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_secondary_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_secondary_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_secondary_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateSecondaryClusterRequest.pb( + service.CreateSecondaryClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateSecondaryClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_secondary_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_secondary_cluster_rest_bad_request( + transport: str = "rest", request_type=service.CreateSecondaryClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, 
"nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_secondary_cluster(request) + + +def test_create_secondary_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_secondary_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/clusters:createsecondary" + % client.transport._host, + args[1], + ) + + +def test_create_secondary_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + +def test_create_secondary_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_rest_required_fields(request_type=service.ListInstancesRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
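+    # Anything still reported as unset at this point should be an optional
+    # query parameter defined by the http rule; the empty set difference
+    # below confirms that no path or body field leaked into that set.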
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListInstancesResponse.to_json( + service.ListInstancesResponse() + ) + + request = service.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListInstancesResponse() + + client.list_instances( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=service.ListInstancesRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances" + % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
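+    # The mocked body above is plain JSON, so these assertions double as a
+    # round-trip check: every scalar set on `return_value` should survive
+    # MessageToJson() and the client's parsing back into a proto-plus
+    # resources.Instance.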
+ assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +def test_get_instance_rest_required_fields(request_type=service.GetInstanceRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
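+            # For reference, an unmocked transcode() would consult the http
+            # rule and produce a dict shaped roughly like the stub below,
+            # e.g. (values are illustrative only, not taken from the
+            # service config):
+            #   {
+            #       "uri": "/v1alpha/projects/p/locations/l/clusters/c/instances/i",
+            #       "method": "get",
+            #       "query_params": {"view": "INSTANCE_VIEW_BASIC"},
+            #   }
+            # Stubbing it with a fieldless uri keeps the test focused on
+            # which query parameters survive the default-value pruning above.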
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Instance.to_json(resources.Instance()) + + request = service.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=service.GetInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
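+    # Note that the *_flattened variants call the method with keyword
+    # arguments instead of a prebuilt request object; the client is
+    # expected to assemble the GetInstanceRequest itself from the keyword
+    # arguments merged with the sample path parameters below.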
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_required_fields( + request_type=service.CreateInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
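+            # Unlike the GET variants above, create_instance transcodes to
+            # a POST, so the stub also carries a "body" entry; fields sent
+            # in the body stay out of the query string, which is why only
+            # the re-added required `instanceId` and the standard `$alt`
+            # parameter are expected below.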
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request( + transport: str = "rest", request_type=service.CreateInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": 
"ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances" + % client.transport._host, + args[1], + ) + + +def test_create_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryInstanceRequest, + dict, + ], +) +def test_create_secondary_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_secondary_instance(request) + + # Establish that the response is the type that we expect. 
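+    # create_secondary_instance is a long-running operation: the mocked
+    # body is a JSON-encoded google.longrunning Operation, and the client
+    # wraps it in an operation future, which is why the assertion below
+    # reaches through `response.operation` for the name.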
+ assert response.operation.name == "operations/spam" + + +def test_create_secondary_instance_rest_required_fields( + request_type=service.CreateSecondaryInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_secondary_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_secondary_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_secondary_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_secondary_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_secondary_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateSecondaryInstanceRequest.pb( + service.CreateSecondaryInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateSecondaryInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_secondary_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_secondary_instance_rest_bad_request( + transport: str = "rest", request_type=service.CreateSecondaryInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": 
"gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_secondary_instance(request) + + +def test_create_secondary_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_secondary_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances:createsecondary" + % client.transport._host, + args[1], + ) + + +def test_create_secondary_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_secondary_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.BatchCreateInstancesRequest, + dict, + ], +) +def test_batch_create_instances_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["requests"] = { + "create_instance_requests": [ + { + "parent": "parent_value", + "instance_id": "instance_id_value", + "instance": { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + }, + "request_id": "request_id_value", + "validate_only": True, + } + ] + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_instances(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_batch_create_instances_rest_required_fields( + request_type=service.BatchCreateInstancesRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
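+            # For batch_create_instances both `parent` (in the URI) and
+            # `requests` (in the body) stay out of the query string, and
+            # `requestId` is optional and dropped as a default, so
+            # presumably only the standard `$alt` parameter should remain
+            # in the expected params below.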
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_instances_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_instances_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.BatchCreateInstancesRequest.pb( + service.BatchCreateInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.BatchCreateInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_instances_rest_bad_request( + transport: str = "rest", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["requests"] = { + "create_instance_requests": [ + { + "parent": "parent_value", + "instance_id": "instance_id_value", + "instance": { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", 
+ "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + }, + "request_id": "request_id_value", + "validate_only": True, + } + ] + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_instances(request) + + +def test_batch_create_instances_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_required_fields( + request_type=service.UpdateInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("instance",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=service.UpdateInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": 
"zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{instance.name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_required_fields( + request_type=service.DeleteInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
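+            # The transcode stub below carries no "body" key because the http
+            # rule for delete_instance sends no request body.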
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=service.DeleteInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
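+    # A bare 400 status is enough for api-core to raise
+    # core_exceptions.BadRequest; no error body is required.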
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +def test_delete_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.FailoverInstanceRequest, + dict, + ], +) +def test_failover_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.failover_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_failover_instance_rest_required_fields( + request_type=service.FailoverInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.failover_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_failover_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.failover_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_failover_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_failover_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.FailoverInstanceRequest.pb( + service.FailoverInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.FailoverInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.failover_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_failover_instance_rest_bad_request( + transport: str = "rest", request_type=service.FailoverInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.failover_instance(request) + + +def test_failover_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.failover_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:failover" + % client.transport._host, + args[1], + ) + + +def test_failover_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +def test_failover_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestartInstanceRequest, + dict, + ], +) +def test_restart_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restart_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restart_instance_rest_required_fields( + request_type=service.RestartInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restart_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restart_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restart_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restart_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restart_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restart_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restart_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.RestartInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restart_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restart_instance_rest_bad_request( + transport: str = "rest", request_type=service.RestartInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restart_instance(request) + + +def test_restart_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restart_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:restart" + % client.transport._host, + args[1], + ) + + +def test_restart_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restart_instance( + service.RestartInstanceRequest(), + name="name_value", + ) + + +def test_restart_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. 
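+    # list_backups wraps the raw ListBackupsResponse in a pager, so
+    # next_page_token and unreachable are asserted on the pager itself.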
+ assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_rest_required_fields(request_type=service.ListBackupsRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListBackupsResponse.to_json( + service.ListBackupsResponse() + ) + + request = service.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=service.ListBackupsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
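+        # (Pagination below is driven entirely by Session.request side
+        # effects, so the transcode mock does appear unnecessary.)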
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
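+    # Every field designated on the mocked Backup should round-trip through
+    # the JSON transport unchanged, including the enum and bool fields.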
+ assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + + +def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Backup.to_json(resources.Backup()) + + request = service.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=service.GetBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
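+    # The flattened call is validated below with path_template.validate to
+    # confirm it transcodes onto the v1alpha http rule.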
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + service.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateBackupRequest, + dict, + ], +) +def test_create_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_backup_rest_required_fields(request_type=service.CreateBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "backupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupId"] = "backup_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
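+            # backup_id is a required query parameter for create_backup, so it
+            # is expected among the request params (empty here) alongside $alt.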
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup(request) + + expected_params = [ + ( + "backupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "backupId", + "backup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_rest_bad_request( + transport: str = "rest", request_type=service.CreateBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + 
"kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup(request) + + +def test_create_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_create_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +def test_create_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backups/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_rest_required_fields(request_type=service.UpdateBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
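+    # Whatever is still reported as unset here may only be one of the
+    # optional query-string parameters; a path or body field appearing in
+    # this set would suggest the http rule and the proto are out of sync.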
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("backup",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=service.UpdateBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backups/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup(request) + + +def test_update_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{backup.name=projects/*/locations/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
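+            # DELETE carries no request body, so the transcode stub below
+            # omits the "body" key and every remaining field has to travel
+            # as a query parameter.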
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=service.DeleteBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
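+    # A 400 status on the mocked session should surface through
+    # google.api_core's error mapping as core_exceptions.BadRequest.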
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup(request) + + +def test_delete_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", + ) + + +def test_delete_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSupportedDatabaseFlagsRequest, + dict, + ], +) +def test_list_supported_database_flags_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_supported_database_flags(request) + + # Establish that the response is the type that we expect. 
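+    # List methods hand back a pager rather than the raw response; iterating
+    # it re-issues requests for as long as next_page_token is set.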
+ assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_supported_database_flags_rest_required_fields( + request_type=service.ListSupportedDatabaseFlagsRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
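+            # The request under test carries only default-valued fields,
+            # which MessageToJson drops, so the asserted params reduce to the
+            # transport's fixed "$alt=json;enum-encoding=int" marker.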
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_supported_database_flags(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_supported_database_flags_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_supported_database_flags._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_supported_database_flags_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListSupportedDatabaseFlagsRequest.pb( + service.ListSupportedDatabaseFlagsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListSupportedDatabaseFlagsResponse.to_json( + service.ListSupportedDatabaseFlagsResponse() + ) + + request = service.ListSupportedDatabaseFlagsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListSupportedDatabaseFlagsResponse() + + client.list_supported_database_flags( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_supported_database_flags_rest_bad_request( + transport: str = "rest", request_type=service.ListSupportedDatabaseFlagsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_supported_database_flags(request) + + +def test_list_supported_database_flags_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_supported_database_flags(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/supportedDatabaseFlags" + % client.transport._host, + args[1], + ) + + +def test_list_supported_database_flags_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), + parent="parent_value", + ) + + +def test_list_supported_database_flags_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListSupportedDatabaseFlagsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_supported_database_flags(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) + + pages = list(client.list_supported_database_flags(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GenerateClientCertificateRequest, + dict, + ], +) +def test_generate_client_certificate_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", + pem_certificate_chain=["pem_certificate_chain_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.GenerateClientCertificateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_client_certificate(request) + + # Establish that the response is the type that we expect. 
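+    # GenerateClientCertificate is a unary call returning a plain proto
+    # message, so unlike the backup RPCs above there is no operation future
+    # to unwrap here.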
+ assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + + +def test_generate_client_certificate_rest_required_fields( + request_type=service.GenerateClientCertificateRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_client_certificate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_client_certificate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.GenerateClientCertificateResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.GenerateClientCertificateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_client_certificate(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_client_certificate_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_client_certificate._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_client_certificate_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_generate_client_certificate" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_generate_client_certificate" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GenerateClientCertificateRequest.pb( + service.GenerateClientCertificateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.GenerateClientCertificateResponse.to_json( + service.GenerateClientCertificateResponse() + ) + + request = service.GenerateClientCertificateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.GenerateClientCertificateResponse() + + client.generate_client_certificate( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_client_certificate_rest_bad_request( + transport: str = "rest", request_type=service.GenerateClientCertificateRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_client_certificate(request) + + +def test_generate_client_certificate_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.GenerateClientCertificateResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.GenerateClientCertificateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_client_certificate(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}:generateClientCertificate" + % client.transport._host, + args[1], + ) + + +def test_generate_client_certificate_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_client_certificate( + service.GenerateClientCertificateRequest(), + parent="parent_value", + ) + + +def test_generate_client_certificate_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetConnectionInfoRequest, + dict, + ], +) +def test_get_connection_info_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
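+        # Populate the fields asserted below so the JSON round trip can show
+        # the REST layer deserializes each of them faithfully.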
+ return_value = resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.ConnectionInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_connection_info(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + + +def test_get_connection_info_rest_required_fields( + request_type=service.GetConnectionInfoRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_info._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.ConnectionInfo() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.ConnectionInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_connection_info(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_connection_info_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_connection_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_info_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_connection_info" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_connection_info" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetConnectionInfoRequest.pb( + service.GetConnectionInfoRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.ConnectionInfo.to_json( + resources.ConnectionInfo() + ) + + request = service.GetConnectionInfoRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.ConnectionInfo() + + client.get_connection_info( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_connection_info_rest_bad_request( + transport: str = "rest", request_type=service.GetConnectionInfoRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_connection_info(request) + + +def test_get_connection_info_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ConnectionInfo() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.ConnectionInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_connection_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*/instances/*}/connectionInfo" + % client.transport._host, + args[1], + ) + + +def test_get_connection_info_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection_info( + service.GetConnectionInfoRequest(), + parent="parent_value", + ) + + +def test_get_connection_info_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
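+    # A bare Mock stands in for ClientOptions here; presumably only its
+    # api_key attribute is consulted before the constructor raises.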
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AlloyDBAdminClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AlloyDBAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + transports.AlloyDBAdminRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AlloyDBAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AlloyDBAdminGrpcTransport, + ) + + +def test_alloy_db_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AlloyDBAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_alloy_db_admin_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.AlloyDBAdminTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AlloyDBAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_clusters", + "get_cluster", + "create_cluster", + "update_cluster", + "delete_cluster", + "promote_cluster", + "restore_cluster", + "create_secondary_cluster", + "list_instances", + "get_instance", + "create_instance", + "create_secondary_instance", + "batch_create_instances", + "update_instance", + "delete_instance", + "failover_instance", + "restart_instance", + "list_backups", + "get_backup", + "create_backup", + "update_backup", + "delete_backup", + "list_supported_database_flags", + "generate_client_certificate", + "get_connection_info", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_alloy_db_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.AlloyDBAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlloyDBAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_alloy_db_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.AlloyDBAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlloyDBAdminTransport() + adc.assert_called_once() + + +def test_alloy_db_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AlloyDBAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + ], +) +def test_alloy_db_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
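+ # (Patching google.auth.default means no real Application Default
+ # Credentials lookup takes place; anonymous test credentials are handed
+ # back instead.)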
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + transports.AlloyDBAdminRestTransport, + ], +) +def test_alloy_db_admin_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AlloyDBAdminGrpcTransport, grpc_helpers), + (transports.AlloyDBAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_alloy_db_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "alloydb.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="alloydb.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport], +) +def test_alloy_db_admin_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
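+ # (client_cert_source_callback is a test helper defined earlier in this
+ # file; it returns a (certificate_chain, private_key) pair of bytes that
+ # the transport passes to grpc.ssl_channel_credentials, as asserted below.)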
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
+def test_alloy_db_admin_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transports.AlloyDBAdminRestTransport(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_alloy_db_admin_rest_lro_client():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_alloy_db_admin_host_no_port(transport_name):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="alloydb.googleapis.com"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "alloydb.googleapis.com:443"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://alloydb.googleapis.com"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_alloy_db_admin_host_with_port(transport_name):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="alloydb.googleapis.com:8000"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "alloydb.googleapis.com:8000"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://alloydb.googleapis.com:8000"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "rest",
+ ],
+)
+def test_alloy_db_admin_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = AlloyDBAdminClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = AlloyDBAdminClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.list_clusters._session
+ session2 = client2.transport.list_clusters._session
+ assert session1 != session2
+ session1 = client1.transport.get_cluster._session
+ session2 = client2.transport.get_cluster._session
+ assert session1 != session2
+ session1 = client1.transport.create_cluster._session
+ session2 = client2.transport.create_cluster._session
+ assert session1 != session2
+ session1 = client1.transport.update_cluster._session
+ session2 = client2.transport.update_cluster._session
+ assert session1 != session2
+ session1 = client1.transport.delete_cluster._session
+ session2 = client2.transport.delete_cluster._session
+ assert session1 != session2
+ session1 = client1.transport.promote_cluster._session
+ session2 = client2.transport.promote_cluster._session
+ assert session1 != session2
+ session1 = client1.transport.restore_cluster._session
+ session2 = client2.transport.restore_cluster._session
+ assert session1 != session2
+ session1 = client1.transport.create_secondary_cluster._session
+ session2 = client2.transport.create_secondary_cluster._session
+ assert session1 != session2
+ session1 = client1.transport.list_instances._session
+ session2 = client2.transport.list_instances._session
+ assert session1 != session2
+ session1 = client1.transport.get_instance._session
+ session2 = client2.transport.get_instance._session
+ assert session1 != session2
+ session1 = client1.transport.create_instance._session
+ session2 = client2.transport.create_instance._session
+ assert session1 != session2
+ session1 = client1.transport.create_secondary_instance._session
+ session2 = client2.transport.create_secondary_instance._session
+ assert session1 != session2
+ session1 = client1.transport.batch_create_instances._session
+ session2 = client2.transport.batch_create_instances._session
+ assert session1 != session2
+ session1 = client1.transport.update_instance._session
+ session2 = client2.transport.update_instance._session
+ assert session1 != session2
+ session1 = client1.transport.delete_instance._session
+ session2 = client2.transport.delete_instance._session
+ assert session1 != session2
+ session1 = client1.transport.failover_instance._session
+ session2 = client2.transport.failover_instance._session
+ assert session1 != session2
+ session1 = client1.transport.restart_instance._session
+ session2 = client2.transport.restart_instance._session
+ assert session1 != session2
+ session1 = client1.transport.list_backups._session
+ session2 = client2.transport.list_backups._session
+ assert session1 != session2
+ session1 = client1.transport.get_backup._session
+ session2 = client2.transport.get_backup._session
+ assert session1 != session2
+ session1 = client1.transport.create_backup._session
+ session2 = client2.transport.create_backup._session
+ assert session1 != session2
+ session1 = client1.transport.update_backup._session
+ session2 = client2.transport.update_backup._session
+ assert session1 != session2
+ session1 = client1.transport.delete_backup._session
+ session2 = client2.transport.delete_backup._session
+ assert session1 != session2
+ session1 = client1.transport.list_supported_database_flags._session
+ session2 = client2.transport.list_supported_database_flags._session
+ assert session1 != session2
+ session1 = client1.transport.generate_client_certificate._session
+ session2 = client2.transport.generate_client_certificate._session
+ assert session1 != session2
+ session1 = client1.transport.get_connection_info._session
+ session2 = client2.transport.get_connection_info._session
+ assert session1 != session2
+
+
+def test_alloy_db_admin_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.AlloyDBAdminGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_alloy_db_admin_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.AlloyDBAdminGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport],
+)
+def test_alloy_db_admin_transport_channel_mtls_with_client_cert_source(transport_class):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport],
+)
+def test_alloy_db_admin_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_alloy_db_admin_grpc_lro_client():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_alloy_db_admin_grpc_lro_async_client():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc_asyncio",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsAsyncClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_backup_path():
+ project = "squid"
+ location = "clam"
+ backup = "whelk"
+ expected = "projects/{project}/locations/{location}/backups/{backup}".format(
+ project=project,
+ location=location,
+ backup=backup,
+ )
+ actual = AlloyDBAdminClient.backup_path(project, location, backup)
+ assert expected == actual
+
+
+def test_parse_backup_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "backup": "nudibranch",
+ }
+ path = AlloyDBAdminClient.backup_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AlloyDBAdminClient.parse_backup_path(path)
+ assert expected == actual
+
+
+def test_cluster_path():
+ project = "cuttlefish"
+ location = "mussel"
+ cluster = "winkle"
+ expected = "projects/{project}/locations/{location}/clusters/{cluster}".format(
+ project=project,
+ location=location,
+ cluster=cluster,
+ )
+ actual = AlloyDBAdminClient.cluster_path(project, location, cluster)
+ assert expected == actual
+
+
+def test_parse_cluster_path():
+ expected = {
+ "project": "nautilus",
+ "location": "scallop",
+ "cluster": "abalone",
+ }
+ path = AlloyDBAdminClient.cluster_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AlloyDBAdminClient.parse_cluster_path(path)
+ assert expected == actual
+
+
+def test_connection_info_path():
+ project = "squid"
+ location = "clam"
+ cluster = "whelk"
+ instance = "octopus"
+ expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}/connectionInfo".format(
+ project=project,
+ location=location,
+ cluster=cluster,
+ instance=instance,
+ )
+ actual = AlloyDBAdminClient.connection_info_path(
+ project, location, cluster, instance
+ )
+ assert expected == actual
+
+
+def test_parse_connection_info_path():
+ expected = {
+ "project": "oyster",
+ "location": "nudibranch",
+ "cluster": "cuttlefish",
+ "instance": "mussel",
+ }
+ path = AlloyDBAdminClient.connection_info_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AlloyDBAdminClient.parse_connection_info_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "winkle" + location = "nautilus" + key_ring = "scallop" + crypto_key = "abalone" + crypto_key_version = "squid" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + crypto_key_version=crypto_key_version, + ) + actual = AlloyDBAdminClient.crypto_key_version_path( + project, location, key_ring, crypto_key, crypto_key_version + ) + assert expected == actual + + +def test_parse_crypto_key_version_path(): + expected = { + "project": "clam", + "location": "whelk", + "key_ring": "octopus", + "crypto_key": "oyster", + "crypto_key_version": "nudibranch", + } + path = AlloyDBAdminClient.crypto_key_version_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_crypto_key_version_path(path) + assert expected == actual + + +def test_instance_path(): + project = "cuttlefish" + location = "mussel" + cluster = "winkle" + instance = "nautilus" + expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + actual = AlloyDBAdminClient.instance_path(project, location, cluster, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "scallop", + "location": "abalone", + "cluster": "squid", + "instance": "clam", + } + path = AlloyDBAdminClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_instance_path(path) + assert expected == actual + + +def test_network_path(): + project = "whelk" + network = "octopus" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = AlloyDBAdminClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "oyster", + "network": "nudibranch", + } + path = AlloyDBAdminClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_network_path(path) + assert expected == actual + + +def test_supported_database_flag_path(): + project = "cuttlefish" + location = "mussel" + flag = "winkle" + expected = "projects/{project}/locations/{location}/flags/{flag}".format( + project=project, + location=location, + flag=flag, + ) + actual = AlloyDBAdminClient.supported_database_flag_path(project, location, flag) + assert expected == actual + + +def test_parse_supported_database_flag_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "flag": "abalone", + } + path = AlloyDBAdminClient.supported_database_flag_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_supported_database_flag_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AlloyDBAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AlloyDBAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AlloyDBAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AlloyDBAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AlloyDBAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AlloyDBAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = AlloyDBAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AlloyDBAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AlloyDBAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AlloyDBAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
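+ # (For example, common_location_path(project="scallop", location="abalone")
+ # renders "projects/scallop/locations/abalone", and parse_common_location_path
+ # recovers the original keyword arguments from that string.)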
+ actual = AlloyDBAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AlloyDBAdminTransport, "_prep_wrapped_messages" + ) as prep: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AlloyDBAdminTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AlloyDBAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
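+ # (Session here is the requests-library session class patched at the class
+ # level; returning a Response with status_code 400 is enough for the REST
+ # transport to surface core_exceptions.BadRequest, so no real HTTP traffic
+ # occurs.)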
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = AlloyDBAdminClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = AlloyDBAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            "x-goog-request-params",
+            "name=locations/abc",
+        ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch the method under test itself, not list_locations.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch the method under test itself, not list_locations.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = AlloyDBAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = AlloyDBAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
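+        # Entering and exiting the client as a context manager should close
+        # the underlying transport.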
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport), + (AlloyDBAdminAsyncClient, transports.AlloyDBAdminGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/__init__.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py new file mode 100644 index 000000000000..06fdd972a11e --- /dev/null +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -0,0 +1,17678 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.alloydb_v1beta.services.alloy_db_admin import ( + AlloyDBAdminAsyncClient, + AlloyDBAdminClient, + pagers, + transports, +) +from google.cloud.alloydb_v1beta.types import resources, service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
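+# Without the override, tests comparing DEFAULT_ENDPOINT against
+# DEFAULT_MTLS_ENDPOINT could pass vacuously when both resolve to the same host.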
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AlloyDBAdminClient._get_default_mtls_endpoint(None) is None + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AlloyDBAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert AlloyDBAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AlloyDBAdminClient, "grpc"), + (AlloyDBAdminAsyncClient, "grpc_asyncio"), + (AlloyDBAdminClient, "rest"), + ], +) +def test_alloy_db_admin_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "alloydb.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://alloydb.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AlloyDBAdminGrpcTransport, "grpc"), + (transports.AlloyDBAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AlloyDBAdminRestTransport, "rest"), + ], +) +def test_alloy_db_admin_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AlloyDBAdminClient, "grpc"), + (AlloyDBAdminAsyncClient, "grpc_asyncio"), + (AlloyDBAdminClient, "rest"), + ], +) +def test_alloy_db_admin_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == 
creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "alloydb.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://alloydb.googleapis.com" + ) + + +def test_alloy_db_admin_client_get_transport_class(): + transport = AlloyDBAdminClient.get_transport_class() + available_transports = [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminRestTransport, + ] + assert transport in available_transports + + transport = AlloyDBAdminClient.get_transport_class("grpc") + assert transport == transports.AlloyDBAdminGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), + ], +) +@mock.patch.object( + AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient) +) +@mock.patch.object( + AlloyDBAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AlloyDBAdminAsyncClient), +) +def test_alloy_db_admin_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AlloyDBAdminClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AlloyDBAdminClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
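+    # "always" is expected to force the mTLS endpoint even though no client
+    # certificate is configured.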
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc", "true"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc", "false"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", "true"), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient) +) +@mock.patch.object( + AlloyDBAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AlloyDBAdminAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_alloy_db_admin_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
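+    # Three scenarios are exercised: an explicit client_cert_source, a cert
+    # discovered through ADC, and no certificate at all.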
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [AlloyDBAdminClient, AlloyDBAdminAsyncClient]) +@mock.patch.object( + AlloyDBAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AlloyDBAdminClient) +) +@mock.patch.object( + AlloyDBAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AlloyDBAdminAsyncClient), +) +def test_alloy_db_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
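+    # With certificates enabled, an explicitly configured endpoint and cert
+    # source should be returned unchanged.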
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), + ], +) +def test_alloy_db_admin_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AlloyDBAdminClient, + transports.AlloyDBAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest", None), + ], +) +def test_alloy_db_admin_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_alloy_db_admin_client_client_options_from_dict(): + with mock.patch( + "google.cloud.alloydb_v1beta.services.alloy_db_admin.transports.AlloyDBAdminGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AlloyDBAdminClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AlloyDBAdminClient, + transports.AlloyDBAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AlloyDBAdminAsyncClient, + transports.AlloyDBAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_alloy_db_admin_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
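+    # create_channel should receive the file-based credentials, not the ADC
+    # credentials returned by google.auth.default.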
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "alloydb.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="alloydb.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListClustersRequest, + dict, + ], +) +def test_list_clusters(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async( + transport: str = "grpc_asyncio", request_type=service.ListClustersRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_clusters_async_from_dict(): + await test_list_clusters_async(request_type=dict) + + +def test_list_clusters_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = service.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_clusters_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].parent
+    mock_val = "parent_value"
+    assert arg == mock_val
+
+
+def test_list_clusters_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_clusters(
+            service.ListClustersRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListClustersResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListClustersResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_clusters(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_clusters(
+            service.ListClustersRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_clusters_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_clusters(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Cluster) for i in results)
+
+
+def test_list_clusters_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Set the response to a series of pages.
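+        # Each element of side_effect is consumed by one successive call; the
+        # trailing RuntimeError guards against fetching past the final page.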
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_clusters(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_clusters(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.Cluster) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListClustersResponse(
+                clusters=[],
+                next_page_token="def",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListClustersResponse(
+                clusters=[
+                    resources.Cluster(),
+                    resources.Cluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_clusters(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetClusterRequest,
+        dict,
+    ],
+)
+def test_get_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + backup_source=resources.BackupSource(backup_uid="backup_uid_value"), + ) + response = client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async( + transport: str = "grpc_asyncio", request_type=service.GetClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + ) + ) + response = await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetClusterRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +@pytest.mark.asyncio +async def test_get_cluster_async_from_dict(): + await test_get_cluster_async(request_type=dict) + + +def test_get_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = resources.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster()) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cluster( + service.GetClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_cluster_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Cluster() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Cluster()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cluster_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_cluster( + service.GetClusterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateClusterRequest() + + +@pytest.mark.asyncio +async def test_create_cluster_async( + transport: str = "grpc_asyncio", request_type=service.CreateClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
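+    # create_cluster is a long-running operation: the transport returns a raw
+    # operations_pb2.Operation, which the client wraps in a future.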
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_cluster_async_from_dict(): + await test_create_cluster_async(request_type=dict) + + +def test_create_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
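+        # (The client coalesces the flattened kwargs into a single
+        # CreateClusterRequest; each field below should round-trip into the
+        # request proto unchanged.)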
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+def test_create_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_cluster(
+            service.CreateClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_cluster(
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_cluster(
+            service.CreateClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.UpdateClusterRequest,
+        dict,
+    ],
+)
+def test_update_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
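+        # (The Operation name below is an arbitrary placeholder standing in
+        # for the long-running operation a real UpdateCluster call returns.)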
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + +@pytest.mark.asyncio +async def test_update_cluster_async( + transport: str = "grpc_asyncio", request_type=service.UpdateClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_cluster_async_from_dict(): + await test_update_cluster_async(request_type=dict) + + +def test_update_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateClusterRequest() + + request.cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cluster.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
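+    # (For UpdateCluster the routing value is the nested cluster.name field,
+    # so it is set via request.cluster.name rather than a top-level name.)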
+    request = service.UpdateClusterRequest()
+
+    request.cluster.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.update_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "cluster.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_cluster_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_cluster(
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_cluster(
+            service.UpdateClusterRequest(),
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_cluster(
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=service.DeleteClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) + + +def test_delete_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_cluster(
+            service.DeleteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_cluster(
+            service.DeleteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.PromoteClusterRequest,
+        dict,
+    ],
+)
+def test_promote_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.promote_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.PromoteClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_promote_cluster_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
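+    # (The method is invoked with no arguments at all; the client is expected
+    # to synthesize an empty PromoteClusterRequest on the caller's behalf.)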
+ with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + client.promote_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.PromoteClusterRequest() + + +@pytest.mark.asyncio +async def test_promote_cluster_async( + transport: str = "grpc_asyncio", request_type=service.PromoteClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.promote_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.PromoteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_promote_cluster_async_from_dict(): + await test_promote_cluster_async(request_type=dict) + + +def test_promote_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.PromoteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.promote_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_promote_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.PromoteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.promote_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
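+    # (kw["metadata"] is a sequence of (key, value) tuples; on a real call
+    # the routing entry would look roughly like
+    #     ("x-goog-request-params", "name=projects/p/locations/l/clusters/c"),
+    # while here the value is just the test placeholder.)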
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_promote_cluster_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.promote_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_promote_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.promote_cluster(
+            service.PromoteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_promote_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.promote_cluster(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_promote_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.promote_cluster(
+            service.PromoteClusterRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.RestoreClusterRequest,
+        dict,
+    ],
+)
+def test_restore_cluster(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.restore_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + client.restore_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + +@pytest.mark.asyncio +async def test_restore_cluster_async( + transport: str = "grpc_asyncio", request_type=service.RestoreClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestoreClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_cluster_async_from_dict(): + await test_restore_cluster_async(request_type=dict) + + +def test_restore_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestoreClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestoreClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
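+    # (grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response in an
+    # awaitable, mimicking a real async unary-unary gRPC call.)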
+ with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryClusterRequest, + dict, + ], +) +def test_create_secondary_cluster(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_secondary_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + client.create_secondary_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest() + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryClusterRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest() + + # Establish that the response is the type that we expect. 
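+    # (CreateSecondaryCluster is a long-running operation, so the returned
+    # handle is asserted to be a future.Future rather than the raw
+    # Operation proto.)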
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_async_from_dict(): + await test_create_secondary_cluster_async(request_type=dict) + + +def test_create_secondary_cluster_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_secondary_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_secondary_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+def test_create_secondary_cluster_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_secondary_cluster(
+            service.CreateSecondaryClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_secondary_cluster_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_secondary_cluster), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_secondary_cluster(
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = resources.Cluster(
+            backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+        )
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_secondary_cluster_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_secondary_cluster(
+            service.CreateSecondaryClusterRequest(),
+            parent="parent_value",
+            cluster=resources.Cluster(
+                backup_source=resources.BackupSource(backup_uid="backup_uid_value")
+            ),
+            cluster_id="cluster_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.ListInstancesRequest,
+        dict,
+    ],
+)
+def test_list_instances(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Designate an appropriate return value for the call.
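+        # (next_page_token and unreachable are populated so the test can
+        # check that both surface on the returned pager.)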
+ call.return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = service.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_instances_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListInstancesRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListInstancesResponse()
+        )
+        await client.list_instances(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_instances_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListInstancesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_instances(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_instances_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instances(
+            service.ListInstancesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListInstancesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instances(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instances(
+            service.ListInstancesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_instances_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_instances(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Instance) for i in results)
+
+
+def test_list_instances_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instances(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_instances_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
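+        # (Each element of side_effect answers one successive RPC: pages of
+        # 3, 0, 1 and 2 instances, then a RuntimeError sentinel that would
+        # surface if the pager fetched past the last page.)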
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_instances(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.Instance) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_instances_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListInstancesResponse(
+                instances=[],
+                next_page_token="def",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListInstancesResponse(
+                instances=[
+                    resources.Instance(),
+                    resources.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_instances(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetInstanceRequest,
+        dict,
+    ],
+)
+def test_get_instance(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Instance(
+            name="name_value",
+            display_name="display_name_value",
+            uid="uid_value",
+            state=resources.Instance.State.READY,
+            instance_type=resources.Instance.InstanceType.PRIMARY,
+            availability_type=resources.Instance.AvailabilityType.ZONAL,
+            gce_zone="gce_zone_value",
+            ip_address="ip_address_value",
+            reconciling=True,
+            etag="etag_value",
+        )
+        response = client.get_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.GetInstanceRequest()
+
+    # Establish that the response is the type that we expect.
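+    # (Each scalar populated on the mock Instance above should round-trip
+    # through the client surface unchanged, as checked field by field below.)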
+ assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + ) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = resources.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
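+        # Note: the async variants only assert that at least one call was
+        # recorded, rather than exactly one as in the sync tests.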
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateInstanceRequest, + dict, + ], +) +def test_create_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. 
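+    # CreateInstance is a long-running operation, so the client wraps the raw
+    # Operation proto in an operation future; outside of tests a caller would
+    # typically `await response.result()` to wait for completion.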
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
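+    # The two calling conventions are mutually exclusive, so the generated
+    # client rejects the combination with a ValueError before any RPC is made.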
+ with pytest.raises(ValueError): + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryInstanceRequest, + dict, + ], +) +def test_create_secondary_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_secondary_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
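+    # Patching "__call__" on the multicallable's type intercepts the RPC at
+    # the stub boundary, so no real channel traffic occurs.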
+ with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + client.create_secondary_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_secondary_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_secondary_instance_async_from_dict(): + await test_create_secondary_instance_async(request_type=dict) + + +def test_create_secondary_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_secondary_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryInstanceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_secondary_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
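+    # Each mock_calls entry is a (name, args, kwargs) triple; the routing
+    # header lands in the "metadata" kwarg as a (key, value) tuple.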
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_secondary_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_secondary_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
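+    # Because the check fires before the transport is touched, these
+    # error-path tests need no mock; the awaited call raises immediately.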
+ with pytest.raises(ValueError): + await client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.BatchCreateInstancesRequest, + dict, + ], +) +def test_batch_create_instances(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + client.batch_create_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + +@pytest.mark.asyncio +async def test_batch_create_instances_async( + transport: str = "grpc_asyncio", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.BatchCreateInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_instances_async_from_dict(): + await test_batch_create_instances_async(request_type=dict) + + +def test_batch_create_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
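+    # (BatchCreateInstances takes only a request object; it has no flattened
+    # signature, which is why no flattened-field tests follow for it.)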
+ request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_instances_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=service.UpdateInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceRequest() + + request.instance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
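+    # UpdateInstance routes on a nested field, so the header value below uses
+    # the dotted path "instance.name" rather than a top-level field name.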
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] + + +def test_update_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_instance( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
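+    # Field-header tests never resolve the returned operation, so a bare
+    # Operation proto ("operations/op") suffices as the canned response.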
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.FailoverInstanceRequest, + dict, + ], +) +def test_failover_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_failover_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + client.failover_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + +@pytest.mark.asyncio +async def test_failover_instance_async( + transport: str = "grpc_asyncio", request_type=service.FailoverInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_failover_instance_async_from_dict(): + await test_failover_instance_async(request_type=dict) + + +def test_failover_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.FailoverInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_failover_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.FailoverInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_failover_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.failover_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_failover_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_failover_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
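+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response so it
+    # can be awaited like a real grpc.aio call object.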
+ with mock.patch.object( + type(client.transport.failover_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.failover_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_failover_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestartInstanceRequest, + dict, + ], +) +def test_restart_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestartInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restart_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + client.restart_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestartInstanceRequest() + + +@pytest.mark.asyncio +async def test_restart_instance_async( + transport: str = "grpc_asyncio", request_type=service.RestartInstanceRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RestartInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restart_instance_async_from_dict(): + await test_restart_instance_async(request_type=dict) + + +def test_restart_instance_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestartInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restart_instance_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RestartInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restart_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_restart_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restart_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_restart_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restart_instance( + service.RestartInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_restart_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restart_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restart_instance( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restart_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restart_instance( + service.RestartInstanceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
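+    # Invoking the method with no arguments exercises the default path, where
+    # the client synthesizes an empty ListBackupsRequest before hitting the
+    # stub.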
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        client.list_backups()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListBackupsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async(
+    transport: str = "grpc_asyncio", request_type=service.ListBackupsRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListBackupsResponse(
+                next_page_token="next_page_token_value",
+                unreachable=["unreachable_value"],
+            )
+        )
+        response = await client.list_backups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListBackupsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBackupsAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_from_dict():
+    await test_list_backups_async(request_type=dict)
+
+
+def test_list_backups_field_headers():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListBackupsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        call.return_value = service.ListBackupsResponse()
+        client.list_backups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_backups_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListBackupsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListBackupsResponse()
+        )
+        await client.list_backups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
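+    # The routing parameter travels as an x-goog-request-params entry in the
+    # call's keyword metadata rather than in the request body.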
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_backups_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListBackupsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_backups(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_backups_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_backups(
+            service.ListBackupsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_backups_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListBackupsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_backups(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_backups_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_backups(
+            service.ListBackupsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_backups_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Set the response to a series of pages.
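+        # The trailing RuntimeError makes the mock fail loudly if the pager
+        # ever requests a page beyond the last response.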
+        call.side_effect = (
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListBackupsResponse(
+                backups=[],
+                next_page_token="def",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_backups(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Backup) for i in results)
+
+
+def test_list_backups_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListBackupsResponse(
+                backups=[],
+                next_page_token="def",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_backups(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListBackupsResponse(
+                backups=[],
+                next_page_token="def",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_backups(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.Backup) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListBackupsResponse(
+                backups=[],
+                next_page_token="def",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListBackupsResponse(
+                backups=[
+                    resources.Backup(),
+                    resources.Backup(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_backups(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetBackupRequest,
+        dict,
+    ],
+)
+def test_get_backup(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Backup(
+            name="name_value",
+            display_name="display_name_value",
+            uid="uid_value",
+            state=resources.Backup.State.READY,
+            type_=resources.Backup.Type.ON_DEMAND,
+            description="description_value",
+            cluster_uid="cluster_uid_value",
+            cluster_name="cluster_name_value",
+            reconciling=True,
+            etag="etag_value",
+            size_bytes=1089,
+        )
+        response = client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.GetBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Backup)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.uid == "uid_value"
+    assert response.state == resources.Backup.State.READY
+    assert response.type_ == resources.Backup.Type.ON_DEMAND
+    assert response.description == "description_value"
+    assert response.cluster_uid == "cluster_uid_value"
+    assert response.cluster_name == "cluster_name_value"
+    assert response.reconciling is True
+    assert response.etag == "etag_value"
+    assert response.size_bytes == 1089
+
+
+def test_get_backup_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        client.get_backup()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.GetBackupRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_backup_async(
+    transport: str = "grpc_asyncio", request_type=service.GetBackupRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
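+    # request_type is the proto class by default; test_get_backup_async_from_dict
+    # re-runs this test with a plain dict request.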
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            resources.Backup(
+                name="name_value",
+                display_name="display_name_value",
+                uid="uid_value",
+                state=resources.Backup.State.READY,
+                type_=resources.Backup.Type.ON_DEMAND,
+                description="description_value",
+                cluster_uid="cluster_uid_value",
+                cluster_name="cluster_name_value",
+                reconciling=True,
+                etag="etag_value",
+                size_bytes=1089,
+            )
+        )
+        response = await client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.GetBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Backup)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.uid == "uid_value"
+    assert response.state == resources.Backup.State.READY
+    assert response.type_ == resources.Backup.Type.ON_DEMAND
+    assert response.description == "description_value"
+    assert response.cluster_uid == "cluster_uid_value"
+    assert response.cluster_name == "cluster_name_value"
+    assert response.reconciling is True
+    assert response.etag == "etag_value"
+    assert response.size_bytes == 1089
+
+
+@pytest.mark.asyncio
+async def test_get_backup_async_from_dict():
+    await test_get_backup_async(request_type=dict)
+
+
+def test_get_backup_field_headers():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetBackupRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        call.return_value = resources.Backup()
+        client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_backup_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetBackupRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup())
+        await client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_backup_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Backup()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_backup(
+            service.GetBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_backup(
+            service.GetBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.CreateBackupRequest,
+        dict,
+    ],
+)
+def test_create_backup(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateBackupRequest()
+
+    # Establish that the response is the type that we expect.
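+    # CreateBackup is a long-running operation, so the mocked Operation proto
+    # is surfaced to the caller wrapped in a future.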
+    assert isinstance(response, future.Future)
+
+
+def test_create_backup_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        client.create_backup()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateBackupRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_backup_async(
+    transport: str = "grpc_asyncio", request_type=service.CreateBackupRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        response = await client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_create_backup_async_from_dict():
+    await test_create_backup_async(request_type=dict)
+
+
+def test_create_backup_field_headers():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.CreateBackupRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_backup_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.CreateBackupRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_backup_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_backup(
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].backup_id
+        mock_val = "backup_id_value"
+        assert arg == mock_val
+
+
+def test_create_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_backup(
+            service.CreateBackupRequest(),
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_backup(
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].backup_id
+        mock_val = "backup_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_backup(
+            service.CreateBackupRequest(),
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.UpdateBackupRequest,
+        dict,
+    ],
+)
+def test_update_backup(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.UpdateBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_backup_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        client.update_backup()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.UpdateBackupRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_backup_async(
+    transport: str = "grpc_asyncio", request_type=service.UpdateBackupRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        response = await client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.UpdateBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_update_backup_async_from_dict():
+    await test_update_backup_async(request_type=dict)
+
+
+def test_update_backup_field_headers():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.UpdateBackupRequest()
+
+    request.backup.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
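+    # For UpdateBackup the routing parameter is drawn from the nested
+    # backup.name field rather than a top-level field.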
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "backup.name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_backup_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.UpdateBackupRequest()
+
+    request.backup.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "backup.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_backup_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_backup(
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_backup(
+            service.UpdateBackupRequest(),
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
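+        # FakeUnaryUnaryCall wraps the response so the mocked stub returns an
+        # awaitable, mirroring a real grpc.aio call.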
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_backup(
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup
+        mock_val = resources.Backup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_backup(
+            service.UpdateBackupRequest(),
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.DeleteBackupRequest,
+        dict,
+    ],
+)
+def test_delete_backup(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.DeleteBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_backup_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        client.delete_backup()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.DeleteBackupRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_async(
+    transport: str = "grpc_asyncio", request_type=service.DeleteBackupRequest
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        response = await client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.DeleteBackupRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_async_from_dict():
+    await test_delete_backup_async(request_type=dict)
+
+
+def test_delete_backup_field_headers():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.DeleteBackupRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.DeleteBackupRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_backup_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_backup_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_backup(
+            service.DeleteBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_backup(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_backup(
+            service.DeleteBackupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.ListSupportedDatabaseFlagsRequest,
+        dict,
+    ],
+)
+def test_list_supported_database_flags(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListSupportedDatabaseFlagsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_supported_database_flags(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListSupportedDatabaseFlagsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_supported_database_flags_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
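+    # An omitted request should still produce a default
+    # ListSupportedDatabaseFlagsRequest on the wire.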
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        client.list_supported_database_flags()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListSupportedDatabaseFlagsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_async(
+    transport: str = "grpc_asyncio",
+    request_type=service.ListSupportedDatabaseFlagsRequest,
+):
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListSupportedDatabaseFlagsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.list_supported_database_flags(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListSupportedDatabaseFlagsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSupportedDatabaseFlagsAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_async_from_dict():
+    await test_list_supported_database_flags_async(request_type=dict)
+
+
+def test_list_supported_database_flags_field_headers():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListSupportedDatabaseFlagsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        call.return_value = service.ListSupportedDatabaseFlagsResponse()
+        client.list_supported_database_flags(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_field_headers_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListSupportedDatabaseFlagsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListSupportedDatabaseFlagsResponse()
+        )
+        await client.list_supported_database_flags(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_supported_database_flags_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListSupportedDatabaseFlagsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_supported_database_flags(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_supported_database_flags_flattened_error():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_supported_database_flags(
+            service.ListSupportedDatabaseFlagsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_flattened_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListSupportedDatabaseFlagsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_supported_database_flags(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_flattened_error_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_supported_database_flags(
+            service.ListSupportedDatabaseFlagsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_supported_database_flags_pager(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_supported_database_flags(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results)
+
+
+def test_list_supported_database_flags_pages(transport_name: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_supported_database_flags(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_async_pager():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_supported_database_flags(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_supported_database_flags_async_pages():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_supported_database_flags),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="abc",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[],
+                next_page_token="def",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                ],
+                next_page_token="ghi",
+            ),
+            service.ListSupportedDatabaseFlagsResponse(
+                supported_database_flags=[
+                    resources.SupportedDatabaseFlag(),
+                    resources.SupportedDatabaseFlag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_supported_database_flags(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GenerateClientCertificateRequest,
+        dict,
+    ],
+)
+def test_generate_client_certificate(request_type, transport: str = "grpc"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_client_certificate), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.GenerateClientCertificateResponse(
+            pem_certificate="pem_certificate_value",
+            pem_certificate_chain=["pem_certificate_chain_value"],
+        )
+        response = client.generate_client_certificate(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + + +def test_generate_client_certificate_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + client.generate_client_certificate() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest() + + +@pytest.mark.asyncio +async def test_generate_client_certificate_async( + transport: str = "grpc_asyncio", + request_type=service.GenerateClientCertificateRequest, +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", + pem_certificate_chain=["pem_certificate_chain_value"], + ) + ) + response = await client.generate_client_certificate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + + +@pytest.mark.asyncio +async def test_generate_client_certificate_async_from_dict(): + await test_generate_client_certificate_async(request_type=dict) + + +def test_generate_client_certificate_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GenerateClientCertificateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + call.return_value = service.GenerateClientCertificateResponse() + client.generate_client_certificate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
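+ # The routing header travels as gRPC metadata: the client renders the
+ # routed field into an "x-goog-request-params" entry, so the assertion
+ # inspects kw["metadata"] rather than the request itself.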
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_generate_client_certificate_field_headers_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GenerateClientCertificateRequest()
+
+ request.parent = "parent_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.generate_client_certificate), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ service.GenerateClientCertificateResponse()
+ )
+ await client.generate_client_certificate(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_generate_client_certificate_flattened():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.generate_client_certificate), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.GenerateClientCertificateResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.generate_client_certificate(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+def test_generate_client_certificate_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.generate_client_certificate(
+ service.GenerateClientCertificateRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_generate_client_certificate_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.generate_client_certificate), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ service.GenerateClientCertificateResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.generate_client_certificate(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
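+ # The flattened "parent" keyword should have been copied into the
+ # matching field of the request proto before the RPC was issued.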
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_client_certificate_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_client_certificate( + service.GenerateClientCertificateRequest(), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetConnectionInfoRequest, + dict, + ], +) +def test_get_connection_info(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + ) + response = client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConnectionInfoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + + +def test_get_connection_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + client.get_connection_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConnectionInfoRequest() + + +@pytest.mark.asyncio +async def test_get_connection_info_async( + transport: str = "grpc_asyncio", request_type=service.GetConnectionInfoRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
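+ # grpc_helpers_async.FakeUnaryUnaryCall wraps the response in an
+ # awaitable object, standing in for the call object a real gRPC aio
+ # stub would return.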
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + ) + ) + response = await client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConnectionInfoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + + +@pytest.mark.asyncio +async def test_get_connection_info_async_from_dict(): + await test_get_connection_info_async(request_type=dict) + + +def test_get_connection_info_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetConnectionInfoRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + call.return_value = resources.ConnectionInfo() + client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_connection_info_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetConnectionInfoRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.ConnectionInfo() + ) + await client.get_connection_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_get_connection_info_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.ConnectionInfo() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
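+ # Truthy values matter here: they let the assertions below distinguish
+ # a field that was explicitly set from one left at its falsy proto
+ # default.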
+ client.get_connection_info(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+def test_get_connection_info_flattened_error():
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_connection_info(
+ service.GetConnectionInfoRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_connection_info_flattened_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_connection_info), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ resources.ConnectionInfo()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_connection_info(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_connection_info_flattened_error_async():
+ client = AlloyDBAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_connection_info(
+ service.GetConnectionInfoRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ service.ListClustersRequest,
+ dict,
+ ],
+)
+def test_list_clusters_rest(request_type):
+ client = AlloyDBAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"parent": "projects/sample1/locations/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = service.ListClustersResponse(
+ next_page_token="next_page_token_value",
+ unreachable=["unreachable_value"],
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = service.ListClustersResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.list_clusters(request)
+
+ # Establish that the response is the type that we expect.
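+ # The REST list call returns a pager wrapping the first page; scalar
+ # fields such as next_page_token proxy through to that underlying
+ # response.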
+ assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
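+ # The dict below mimics the shape of path_template.transcode's output
+ # for a simple GET binding; the REST transport consumes its "uri",
+ # "method", and "query_params" keys directly.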
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListClustersResponse.to_json( + service.ListClustersResponse() + ) + + request = service.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=service.ListClustersRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
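+ # A 400 status on the mocked response is translated by api_core's HTTP
+ # error mapping into core_exceptions.BadRequest, which pytest.raises
+ # expects below.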
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + service.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
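+ # Unlike the gRPC pager tests above, the REST pager issues one real HTTP
+ # request per page, so the pages are queued on the mocked session below
+ # as serialized JSON responses.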
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + service.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + backup_source=resources.BackupSource(backup_uid="backup_uid_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cluster(request) + + # Establish that the response is the type that we expect. 
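+ # Enum and message fields should survive the JSON round trip: the mocked
+ # response was serialized with MessageToJson above and re-parsed by the
+ # REST transport before these assertions run.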
+ assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + + +def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Cluster.to_json(resources.Cluster()) + + request = service.GetClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Cluster() + + client.get_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=service.GetClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cluster(request) + + +def test_get_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
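+ # Patching type(client.transport._session).request intercepts the
+ # transport's own requests.Session instance, the same mocking pattern
+ # the non-flattened REST tests above rely on.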
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + service.GetClusterRequest(), + name="name_value", + ) + + +def test_get_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + 
"schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "clusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cluster(request) + + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=service.CreateClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + 
"database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cluster(request) + + +def test_create_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_create_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_cluster( + service.CreateClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + +def test_create_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "projects/sample1/locations/sample2/clusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. 
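+ # update_cluster is a long-running operation; the returned wrapper
+ # exposes the raw operations_pb2.Operation as response.operation, which
+ # is what the assertion below inspects.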
+ assert response.operation.name == "operations/spam" + + +def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("cluster",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=service.UpdateClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "projects/sample1/locations/sample2/clusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + 
"cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cluster(request) + + +def test_update_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{cluster.name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_update_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
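+ # For illustration only (nothing below is asserted by this test): run
+ # against the real http_options, transcode() for DeleteCluster would
+ # resolve the path template and return roughly
+ #     {"uri": "/v1beta/projects/p/locations/l/clusters/c",
+ #      "method": "delete",
+ #      "query_params": <the remaining non-path fields>}
+ # whereas the mock below substitutes a fixed "v1/sample_method" URI.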
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=service.DeleteClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
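+ # google-api-core maps an HTTP 400 response to
+ # core_exceptions.BadRequest, so the client is expected to raise the
+ # typed exception here rather than return a response object.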
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cluster(request) + + +def test_delete_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cluster( + service.DeleteClusterRequest(), + name="name_value", + ) + + +def test_delete_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.PromoteClusterRequest, + dict, + ], +) +def test_promote_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.promote_cluster(request) + + # Establish that the response is the type that we expect. 
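+ # promote_cluster is a long-running operation: the client wraps the raw
+ # operations_pb2.Operation in a google.api_core.operation.Operation
+ # future, whose .operation attribute exposes the underlying proto,
+ # hence the .operation.name assertion below.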
+ assert response.operation.name == "operations/spam" + + +def test_promote_cluster_rest_required_fields( + request_type=service.PromoteClusterRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).promote_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).promote_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
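+ # The "$alt" system parameter checked at the end of this test is
+ # attached by the REST transport on every call:
+ # "json;enum-encoding=int" requests JSON responses with enums encoded
+ # as integers rather than as symbolic names.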
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.promote_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_promote_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.promote_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_promote_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.PromoteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.promote_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_promote_cluster_rest_bad_request( + transport: str = "rest", request_type=service.PromoteClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.promote_cluster(request) + + +def test_promote_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.promote_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*}:promote" + % client.transport._host, + args[1], + ) + + +def test_promote_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.promote_cluster( + service.PromoteClusterRequest(), + name="name_value", + ) + + +def test_promote_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestoreClusterRequest, + dict, + ], +) +def test_restore_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restore_cluster_rest_required_fields( + request_type=service.RestoreClusterRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
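+ # Field names switch to lowerCamelCase when the request is rendered as
+ # JSON: the proto field cluster_id appears as "clusterId" in
+ # jsonified_request, which is why the assertions above use the
+ # camelCase key while request_init uses the snake_case one.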
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restore_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.RestoreClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_cluster_rest_bad_request( + transport: str = "rest", request_type=service.RestoreClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_cluster(request) + + +def test_restore_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryClusterRequest, + dict, + ], +) +def test_create_secondary_cluster_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_secondary_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_secondary_cluster_rest_required_fields( + request_type=service.CreateSecondaryClusterRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "clusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
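+ # Because MessageToJson(..., including_default_value_fields=False)
+ # drops empty strings, "clusterId" was initially absent from
+ # jsonified_request; _get_unset_required_fields reinstated it with a
+ # default value, which is why expected_params below contains
+ # ("clusterId", "") alongside the "$alt" system parameter.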
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_secondary_cluster(request) + + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_secondary_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_secondary_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_secondary_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_secondary_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateSecondaryClusterRequest.pb( + service.CreateSecondaryClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateSecondaryClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_secondary_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_secondary_cluster_rest_bad_request( + transport: str = "rest", request_type=service.CreateSecondaryClusterRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, 
"nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_secondary_cluster(request) + + +def test_create_secondary_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_secondary_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/clusters:createsecondary" + % client.transport._host, + args[1], + ) + + +def test_create_secondary_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + + +def test_create_secondary_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_rest_required_fields(request_type=service.ListInstancesRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
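+ # i.e. unset_fields must be a subset of the method's query parameters:
+ # the set difference below is empty exactly when no path or body field
+ # has leaked into the query string.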
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListInstancesResponse.to_json( + service.ListInstancesResponse() + ) + + request = service.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListInstancesResponse() + + client.list_instances( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=service.ListInstancesRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/clusters/*}/instances" + % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
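+ # Pager mechanics: each ListInstancesResponse below is one page, and
+ # the pager issues another HTTP request whenever next_page_token is
+ # non-empty ("abc", "def", "ghi", then done). The pages are doubled
+ # because the test iterates twice, once for items and once for pages,
+ # and each full iteration consumes four mocked responses.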
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
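+ # The mocked wire response was produced by serializing return_value
+ # through resources.Instance.pb() and json_format.MessageToJson(), so
+ # every scalar populated above should survive the JSON round trip
+ # intact.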
+ assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + + +def test_get_instance_rest_required_fields(request_type=service.GetInstanceRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Instance.to_json(resources.Instance()) + + request = service.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=service.GetInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
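+ # Flattened-call convention: keyword arguments (here just name=) are
+ # folded into a GetInstanceRequest by the client, and sample_request
+ # supplies a value matching the URL pattern so that the real (unmocked)
+ # transcoding succeeds.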
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + service.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_required_fields( + request_type=service.CreateInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
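+ # transcode() normally maps the protobuf request onto an HTTP method, URI,
+ # body and query string according to the google.api.http annotations;
+ # stubbing it keeps the assertion below independent of the routing rules.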
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request( + transport: str = "rest", request_type=service.CreateInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": 
"ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/clusters/*}/instances" + % client.transport._host, + args[1], + ) + + +def test_create_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryInstanceRequest, + dict, + ], +) +def test_create_secondary_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_secondary_instance(request) + + # Establish that the response is the type that we expect. 
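+ # The client wraps the raw payload in an operation future; response.operation
+ # exposes the underlying Operation proto that the mock returned.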
+ assert response.operation.name == "operations/spam" + + +def test_create_secondary_instance_rest_required_fields( + request_type=service.CreateSecondaryInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_secondary_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_secondary_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_secondary_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_secondary_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_secondary_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_secondary_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateSecondaryInstanceRequest.pb( + service.CreateSecondaryInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.CreateSecondaryInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_secondary_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_secondary_instance_rest_bad_request( + transport: str = "rest", request_type=service.CreateSecondaryInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": 
"gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_secondary_instance(request) + + +def test_create_secondary_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_secondary_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/clusters/*}/instances:createsecondary" + % client.transport._host, + args[1], + ) + + +def test_create_secondary_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_secondary_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.BatchCreateInstancesRequest, + dict, + ], +) +def test_batch_create_instances_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["requests"] = { + "create_instance_requests": [ + { + "parent": "parent_value", + "instance_id": "instance_id_value", + "instance": { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + }, + "request_id": "request_id_value", + "validate_only": True, + } + ] + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_instances(request) + + # Establish that the response is the type that we expect. 
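+ # BatchCreateInstances nests complete CreateInstanceRequest messages under
+ # requests.create_instance_requests, but the whole batch still resolves
+ # through a single long-running operation.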
+ assert response.operation.name == "operations/spam" + + +def test_batch_create_instances_rest_required_fields( + request_type=service.BatchCreateInstancesRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_instances_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_instances_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.BatchCreateInstancesRequest.pb( + service.BatchCreateInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.BatchCreateInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_instances_rest_bad_request( + transport: str = "rest", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["requests"] = { + "create_instance_requests": [ + { + "parent": "parent_value", + "instance_id": "instance_id_value", + "instance": { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", 
+ "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + }, + "request_id": "request_id_value", + "validate_only": True, + } + ] + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_instances(request) + + +def test_batch_create_instances_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_required_fields( + request_type=service.UpdateInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("instance",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=service.UpdateInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": 
"zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{instance.name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_required_fields( + request_type=service.DeleteInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=service.DeleteInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
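+ # google.api_core translates an HTTP 400 response into
+ # core_exceptions.BadRequest, which is what the client should surface here.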
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", + ) + + +def test_delete_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.FailoverInstanceRequest, + dict, + ], +) +def test_failover_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.failover_instance(request) + + # Establish that the response is the type that we expect. 
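+ # FailoverInstance posts to the custom :failover verb on the instance name,
+ # as the flattened test below verifies against the path template.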
+ assert response.operation.name == "operations/spam" + + +def test_failover_instance_rest_required_fields( + request_type=service.FailoverInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.failover_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_failover_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.failover_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_failover_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_failover_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.FailoverInstanceRequest.pb( + service.FailoverInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.FailoverInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.failover_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_failover_instance_rest_bad_request( + transport: str = "rest", request_type=service.FailoverInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.failover_instance(request) + + +def test_failover_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.failover_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}:failover" + % client.transport._host, + args[1], + ) + + +def test_failover_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.failover_instance( + service.FailoverInstanceRequest(), + name="name_value", + ) + + +def test_failover_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.RestartInstanceRequest, + dict, + ], +) +def test_restart_instance_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restart_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restart_instance_rest_required_fields( + request_type=service.RestartInstanceRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restart_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restart_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
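+            # For orientation: a real transcode() result is a dict with
+            # "uri", "method", "body" and "query_params" keys derived from
+            # the method's http rule, e.g. (sketch only, not the real rule):
+            #
+            #   {"uri": "/v1beta/{name=...}:restart", "method": "post",
+            #    "body": None, "query_params": {...}}
+            #
+            # Here it is stubbed with a fieldless URI so the call succeeds
+            # no matter which required fields still carry default values.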
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restart_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restart_instance_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restart_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restart_instance_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restart_instance" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.RestartInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restart_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restart_instance_rest_bad_request( + transport: str = "rest", request_type=service.RestartInstanceRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restart_instance(request) + + +def test_restart_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restart_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*/instances/*}:restart" + % client.transport._host, + args[1], + ) + + +def test_restart_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restart_instance( + service.RestartInstanceRequest(), + name="name_value", + ) + + +def test_restart_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. 
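+    # list_backups returns a pager rather than the raw response; the pager
+    # lazily re-issues the request for each next_page_token while proxying
+    # attribute access (e.g. next_page_token, unreachable) to the underlying
+    # ListBackupsResponse, which is what the assertions below rely on.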
+ assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_rest_required_fields(request_type=service.ListBackupsRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
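+            # The only query parameter asserted at the end of this test is
+            # the `$alt` system parameter; "json;enum-encoding=int" asks the
+            # server for JSON with enum values rendered as integers.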
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListBackupsResponse.to_json( + service.ListBackupsResponse() + ) + + request = service.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=service.ListBackupsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
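+        # Test strategy: queue four JSON-encoded pages as successive
+        # Session.request side effects. The pager keeps fetching while
+        # next_page_token is non-empty ("abc" -> "def" -> "ghi" -> ""), so a
+        # full iteration should yield all six Backup messages; the queue is
+        # doubled so the same pages can be consumed again via `.pages`.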
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
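+    # Unlike the operation-returning methods, get_backup yields the resource
+    # itself; every scalar set on the mocked Backup should survive the
+    # proto -> JSON -> proto round trip and is re-checked field by field below.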
+ assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + + +def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Backup.to_json(resources.Backup()) + + request = service.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=service.GetBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
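+    # Flattened-call tests check two things: that keyword arguments are
+    # accepted in place of a request object, and, via path_template.validate
+    # below, that the resulting request targets the expected v1beta URI.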
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + service.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateBackupRequest, + dict, + ], +) +def test_create_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_backup_rest_required_fields(request_type=service.CreateBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "backupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupId"] = "backup_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_backup(request)
+
+            expected_params = [
+                (
+                    "backupId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_backup_rest_unset_required_fields():
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_backup._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "backupId",
+                "requestId",
+                "validateOnly",
+            )
+        )
+        & set(
+            (
+                "parent",
+                "backupId",
+                "backup",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_backup_rest_interceptors(null_interceptor):
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AlloyDBAdminRestInterceptor(),
+    )
+    client = AlloyDBAdminClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "post_create_backup"
+    ) as post, mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "pre_create_backup"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = service.CreateBackupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.create_backup(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_backup_rest_bad_request(
+    transport: str = "rest", request_type=service.CreateBackupRequest
+):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["backup"] = {
+        "name": "name_value",
+        "display_name": "display_name_value",
+        "uid": "uid_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "delete_time": {},
+        "labels": {},
+        "state": 1,
+        "type_": 1,
+        "description": "description_value",
+        "cluster_uid": "cluster_uid_value",
+        "cluster_name": "cluster_name_value",
+        "reconciling": True,
+        "encryption_config": {"kms_key_name": "kms_key_name_value"},
+        "encryption_info": {
+            "encryption_type": 1,
+            "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"],
+        },
+        "etag": "etag_value",
+        "annotations": {},
+        "size_bytes": 1089,
+        "expiry_time": {},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_backup(request)
+
+
+def test_create_backup_rest_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            backup=resources.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_backup(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1beta/{parent=projects/*/locations/*}/backups"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_backup_rest_flattened_error(transport: str = "rest"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +def test_create_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backups/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_rest_required_fields(request_type=service.UpdateBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
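+    # The set difference asserted below should be empty: anything reported
+    # as unset must be one of the known optional query parameters, i.e. no
+    # path or body field should leak into the query string.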
+    assert not set(unset_fields) - set(
+        (
+            "allow_missing",
+            "request_id",
+            "update_mask",
+            "validate_only",
+        )
+    )
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_backup(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_backup_rest_unset_required_fields():
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_backup._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "allowMissing",
+                "requestId",
+                "updateMask",
+                "validateOnly",
+            )
+        )
+        & set(("backup",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_backup_rest_interceptors(null_interceptor):
+    transport = transports.AlloyDBAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AlloyDBAdminRestInterceptor(),
+    )
+    client = AlloyDBAdminClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "post_update_backup"
+    ) as post, mock.patch.object(
+        transports.AlloyDBAdminRestInterceptor, "pre_update_backup"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = service.UpdateBackupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.update_backup(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_backup_rest_bad_request(
+    transport: str = "rest", request_type=service.UpdateBackupRequest
+):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"}
+    }
+    request_init["backup"] = {
+        "name": "projects/sample1/locations/sample2/backups/sample3",
+        "display_name": "display_name_value",
+        "uid": "uid_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "delete_time": {},
+        "labels": {},
+        "state": 1,
+        "type_": 1,
+        "description": "description_value",
+        "cluster_uid": "cluster_uid_value",
+        "cluster_name": "cluster_name_value",
+        "reconciling": True,
+        "encryption_config": {"kms_key_name": "kms_key_name_value"},
+        "encryption_info": {
+            "encryption_type": 1,
+            "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"],
+        },
+        "etag": "etag_value",
+        "annotations": {},
+        "size_bytes": 1089,
+        "expiry_time": {},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_backup(request)
+
+
+def test_update_backup_rest_flattened():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"}
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            backup=resources.Backup(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_backup(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1beta/{backup.name=projects/*/locations/*/backups/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_backup_rest_flattened_error(transport: str = "rest"):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_backup" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = service.DeleteBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=service.DeleteBackupRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup(request) + + +def test_delete_backup_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", + ) + + +def test_delete_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListSupportedDatabaseFlagsRequest, + dict, + ], +) +def test_list_supported_database_flags_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_supported_database_flags(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_supported_database_flags_rest_required_fields( + request_type=service.ListSupportedDatabaseFlagsRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_supported_database_flags(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_supported_database_flags_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_supported_database_flags._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_supported_database_flags_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListSupportedDatabaseFlagsRequest.pb( + service.ListSupportedDatabaseFlagsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListSupportedDatabaseFlagsResponse.to_json( + service.ListSupportedDatabaseFlagsResponse() + ) + + request = service.ListSupportedDatabaseFlagsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListSupportedDatabaseFlagsResponse() + + client.list_supported_database_flags( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_supported_database_flags_rest_bad_request( + transport: str = "rest", request_type=service.ListSupportedDatabaseFlagsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
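+    # google.api_core maps non-2xx responses onto typed exceptions, so the
+    # 400 mocked below surfaces as core_exceptions.BadRequest. A minimal
+    # sketch of the mapping the REST transport relies on:
+    #
+    #   exc = core_exceptions.from_http_response(response_value)
+    #   assert isinstance(exc, core_exceptions.BadRequest)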
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_supported_database_flags(request) + + +def test_list_supported_database_flags_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSupportedDatabaseFlagsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_supported_database_flags(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/supportedDatabaseFlags" + % client.transport._host, + args[1], + ) + + +def test_list_supported_database_flags_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), + parent="parent_value", + ) + + +def test_list_supported_database_flags_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListSupportedDatabaseFlagsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_supported_database_flags(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) + + pages = list(client.list_supported_database_flags(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GenerateClientCertificateRequest, + dict, + ], +) +def test_generate_client_certificate_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", + pem_certificate_chain=["pem_certificate_chain_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.GenerateClientCertificateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_client_certificate(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + + +def test_generate_client_certificate_rest_required_fields( + request_type=service.GenerateClientCertificateRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_client_certificate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_client_certificate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.GenerateClientCertificateResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
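+            # Unlike the GET methods above, generate_client_certificate is
+            # transcoded as a POST against ":generateClientCertificate" (see
+            # the flattened test below), so the request message is also
+            # serialized into the http body; the stub mirrors that by
+            # attaching a "body" entry: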
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.GenerateClientCertificateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_client_certificate(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_client_certificate_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_client_certificate._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_client_certificate_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_generate_client_certificate" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_generate_client_certificate" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GenerateClientCertificateRequest.pb( + service.GenerateClientCertificateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.GenerateClientCertificateResponse.to_json( + service.GenerateClientCertificateResponse() + ) + + request = service.GenerateClientCertificateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.GenerateClientCertificateResponse() + + client.generate_client_certificate( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_client_certificate_rest_bad_request( + transport: str = "rest", request_type=service.GenerateClientCertificateRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_client_certificate(request) + + +def test_generate_client_certificate_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.GenerateClientCertificateResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.GenerateClientCertificateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_client_certificate(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/clusters/*}:generateClientCertificate" + % client.transport._host, + args[1], + ) + + +def test_generate_client_certificate_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_client_certificate( + service.GenerateClientCertificateRequest(), + parent="parent_value", + ) + + +def test_generate_client_certificate_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetConnectionInfoRequest, + dict, + ], +) +def test_get_connection_info_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
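+        # ConnectionInfo carries everything a driver needs to reach an
+        # instance; a hedged caller-side sketch (connect() is a placeholder,
+        # not a helper in this library):
+        #
+        #   info = client.get_connection_info(parent=instance_name)
+        #   connect(host=info.ip_address, ca=info.pem_certificate_chain)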
+ return_value = resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.ConnectionInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_connection_info(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + + +def test_get_connection_info_rest_required_fields( + request_type=service.GetConnectionInfoRequest, +): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_info._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.ConnectionInfo() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = resources.ConnectionInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_connection_info(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_connection_info_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_connection_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_info_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_connection_info" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_get_connection_info" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetConnectionInfoRequest.pb( + service.GetConnectionInfoRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.ConnectionInfo.to_json( + resources.ConnectionInfo() + ) + + request = service.GetConnectionInfoRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.ConnectionInfo() + + client.get_connection_info( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_connection_info_rest_bad_request( + transport: str = "rest", request_type=service.GetConnectionInfoRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_connection_info(request) + + +def test_get_connection_info_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.ConnectionInfo() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = resources.ConnectionInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_connection_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/clusters/*/instances/*}/connectionInfo" + % client.transport._host, + args[1], + ) + + +def test_get_connection_info_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection_info( + service.GetConnectionInfoRequest(), + parent="parent_value", + ) + + +def test_get_connection_info_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
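+    # Exactly one credential source may be supplied per client. For
+    # contrast, a valid api_key-only setup looks like (sketch):
+    #
+    #   opts = client_options.ClientOptions(api_key="my-api-key")
+    #   client = AlloyDBAdminClient(client_options=opts)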
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AlloyDBAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AlloyDBAdminClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AlloyDBAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AlloyDBAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + transports.AlloyDBAdminRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AlloyDBAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AlloyDBAdminGrpcTransport, + ) + + +def test_alloy_db_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AlloyDBAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_alloy_db_admin_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.alloydb_v1beta.services.alloy_db_admin.transports.AlloyDBAdminTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AlloyDBAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
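+    # i.e. each base-class method is effectively a stub of the form
+    #
+    #   def list_clusters(self, request=None):
+    #       raise NotImplementedError()
+    #
+    # which the concrete gRPC/REST transports override one by one.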
+ methods = ( + "list_clusters", + "get_cluster", + "create_cluster", + "update_cluster", + "delete_cluster", + "promote_cluster", + "restore_cluster", + "create_secondary_cluster", + "list_instances", + "get_instance", + "create_instance", + "create_secondary_instance", + "batch_create_instances", + "update_instance", + "delete_instance", + "failover_instance", + "restart_instance", + "list_backups", + "get_backup", + "create_backup", + "update_backup", + "delete_backup", + "list_supported_database_flags", + "generate_client_certificate", + "get_connection_info", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_alloy_db_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.alloydb_v1beta.services.alloy_db_admin.transports.AlloyDBAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlloyDBAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_alloy_db_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.alloydb_v1beta.services.alloy_db_admin.transports.AlloyDBAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AlloyDBAdminTransport() + adc.assert_called_once() + + +def test_alloy_db_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AlloyDBAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + ], +) +def test_alloy_db_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
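+    # google.auth.default() resolves Application Default Credentials by
+    # checking, in order, GOOGLE_APPLICATION_CREDENTIALS, the gcloud user
+    # credentials file, and finally the metadata server, returning a
+    # (credentials, project_id) pair -- the exact shape mocked below.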
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AlloyDBAdminGrpcTransport, + transports.AlloyDBAdminGrpcAsyncIOTransport, + transports.AlloyDBAdminRestTransport, + ], +) +def test_alloy_db_admin_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AlloyDBAdminGrpcTransport, grpc_helpers), + (transports.AlloyDBAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_alloy_db_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "alloydb.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="alloydb.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport], +) +def test_alloy_db_admin_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
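+    # client_cert_source_for_mtls is a zero-argument callback returning a
+    # (cert_bytes, key_bytes) pair that the transport feeds straight into
+    # grpc.ssl_channel_credentials, roughly:
+    #
+    #   cert, key = client_cert_source_callback()
+    #   ssl_creds = grpc.ssl_channel_credentials(
+    #       certificate_chain=cert, private_key=key
+    #   )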
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_alloy_db_admin_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.AlloyDBAdminRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_alloy_db_admin_rest_lro_client():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_alloy_db_admin_host_no_port(transport_name):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="alloydb.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "alloydb.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://alloydb.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_alloy_db_admin_host_with_port(transport_name):
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="alloydb.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "alloydb.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://alloydb.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_alloy_db_admin_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = AlloyDBAdminClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = AlloyDBAdminClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_clusters._session
+    session2 = client2.transport.list_clusters._session
+    assert session1 != session2
+    session1 = client1.transport.get_cluster._session
+    session2 = client2.transport.get_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.create_cluster._session
+    session2 = client2.transport.create_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.update_cluster._session
+    session2 = client2.transport.update_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.delete_cluster._session
+    session2 = client2.transport.delete_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.promote_cluster._session
+    session2 = client2.transport.promote_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.restore_cluster._session
+    session2 = client2.transport.restore_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.create_secondary_cluster._session
+    session2 = client2.transport.create_secondary_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.list_instances._session
+    session2 = client2.transport.list_instances._session
+    assert session1 != session2
+    session1 = client1.transport.get_instance._session
+    session2 = client2.transport.get_instance._session
+    assert session1 != session2
+    session1 = client1.transport.create_instance._session
+    session2 = client2.transport.create_instance._session
+    assert session1 != session2
+    session1 = client1.transport.create_secondary_instance._session
+    session2 = client2.transport.create_secondary_instance._session
+    assert session1 != session2
+    session1 = client1.transport.batch_create_instances._session
+    session2 = client2.transport.batch_create_instances._session
+    assert session1 != session2
+    session1 = client1.transport.update_instance._session
+    session2 = client2.transport.update_instance._session
+    assert session1 != session2
+    session1 = client1.transport.delete_instance._session
+    session2 = client2.transport.delete_instance._session
+    assert session1 != session2
+    session1 = client1.transport.failover_instance._session
+    session2 = client2.transport.failover_instance._session
+    assert session1 != session2
+    session1 = client1.transport.restart_instance._session
+    session2 = client2.transport.restart_instance._session
+    assert session1 != session2
+    session1 = client1.transport.list_backups._session
+    session2 = client2.transport.list_backups._session
+    assert session1 != session2
+    session1 = client1.transport.get_backup._session
+    session2 = client2.transport.get_backup._session
+    assert session1 != session2
+    session1 = client1.transport.create_backup._session
+    session2 = client2.transport.create_backup._session
+    assert session1 != session2
+    session1 = client1.transport.update_backup._session
+    session2 = client2.transport.update_backup._session
+    assert session1 != session2
+    session1 = client1.transport.delete_backup._session
+    session2 = client2.transport.delete_backup._session
+    assert session1 != session2
+    session1 = client1.transport.list_supported_database_flags._session
+    session2 = client2.transport.list_supported_database_flags._session
+    assert session1 != session2
+    session1 = client1.transport.generate_client_certificate._session
+    session2 = client2.transport.generate_client_certificate._session
+    assert session1 != session2
+    session1 = client1.transport.get_connection_info._session
+    session2 = client2.transport.get_connection_info._session
+    assert session1 != session2
+
+
+def test_alloy_db_admin_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AlloyDBAdminGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_alloy_db_admin_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AlloyDBAdminGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport],
+)
+def test_alloy_db_admin_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.AlloyDBAdminGrpcTransport, transports.AlloyDBAdminGrpcAsyncIOTransport],
+)
+def test_alloy_db_admin_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_alloy_db_admin_grpc_lro_client():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
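+    # The operations client is what Operation futures poll through; a
+    # typical end-to-end flow for any of the LRO methods above (sketch,
+    # create_request stands in for a real CreateClusterRequest):
+    #
+    #   operation = client.create_cluster(request=create_request)
+    #   cluster = operation.result()  # polls GetOperation until done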
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_alloy_db_admin_grpc_lro_async_client():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_backup_path():
+    project = "squid"
+    location = "clam"
+    backup = "whelk"
+    expected = "projects/{project}/locations/{location}/backups/{backup}".format(
+        project=project,
+        location=location,
+        backup=backup,
+    )
+    actual = AlloyDBAdminClient.backup_path(project, location, backup)
+    assert expected == actual
+
+
+def test_parse_backup_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "backup": "nudibranch",
+    }
+    path = AlloyDBAdminClient.backup_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = AlloyDBAdminClient.parse_backup_path(path)
+    assert expected == actual
+
+
+def test_cluster_path():
+    project = "cuttlefish"
+    location = "mussel"
+    cluster = "winkle"
+    expected = "projects/{project}/locations/{location}/clusters/{cluster}".format(
+        project=project,
+        location=location,
+        cluster=cluster,
+    )
+    actual = AlloyDBAdminClient.cluster_path(project, location, cluster)
+    assert expected == actual
+
+
+def test_parse_cluster_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "cluster": "abalone",
+    }
+    path = AlloyDBAdminClient.cluster_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = AlloyDBAdminClient.parse_cluster_path(path)
+    assert expected == actual
+
+
+def test_connection_info_path():
+    project = "squid"
+    location = "clam"
+    cluster = "whelk"
+    instance = "octopus"
+    expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}/connectionInfo".format(
+        project=project,
+        location=location,
+        cluster=cluster,
+        instance=instance,
+    )
+    actual = AlloyDBAdminClient.connection_info_path(
+        project, location, cluster, instance
+    )
+    assert expected == actual
+
+
+def test_parse_connection_info_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "cluster": "cuttlefish",
+        "instance": "mussel",
+    }
+    path = AlloyDBAdminClient.connection_info_path(**expected)
+
+    # Check that the path construction is reversible.
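+    # parse_*_path inverts the template with a regex, so with the values
+    # above the round trip is:
+    #
+    #   "projects/oyster/locations/nudibranch/clusters/cuttlefish"
+    #   "/instances/mussel/connectionInfo"
+    #   -> {"project": "oyster", "location": "nudibranch",
+    #       "cluster": "cuttlefish", "instance": "mussel"}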
+ actual = AlloyDBAdminClient.parse_connection_info_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "winkle" + location = "nautilus" + key_ring = "scallop" + crypto_key = "abalone" + crypto_key_version = "squid" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + crypto_key_version=crypto_key_version, + ) + actual = AlloyDBAdminClient.crypto_key_version_path( + project, location, key_ring, crypto_key, crypto_key_version + ) + assert expected == actual + + +def test_parse_crypto_key_version_path(): + expected = { + "project": "clam", + "location": "whelk", + "key_ring": "octopus", + "crypto_key": "oyster", + "crypto_key_version": "nudibranch", + } + path = AlloyDBAdminClient.crypto_key_version_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_crypto_key_version_path(path) + assert expected == actual + + +def test_instance_path(): + project = "cuttlefish" + location = "mussel" + cluster = "winkle" + instance = "nautilus" + expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( + project=project, + location=location, + cluster=cluster, + instance=instance, + ) + actual = AlloyDBAdminClient.instance_path(project, location, cluster, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "scallop", + "location": "abalone", + "cluster": "squid", + "instance": "clam", + } + path = AlloyDBAdminClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_instance_path(path) + assert expected == actual + + +def test_network_path(): + project = "whelk" + network = "octopus" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = AlloyDBAdminClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "oyster", + "network": "nudibranch", + } + path = AlloyDBAdminClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_network_path(path) + assert expected == actual + + +def test_supported_database_flag_path(): + project = "cuttlefish" + location = "mussel" + flag = "winkle" + expected = "projects/{project}/locations/{location}/flags/{flag}".format( + project=project, + location=location, + flag=flag, + ) + actual = AlloyDBAdminClient.supported_database_flag_path(project, location, flag) + assert expected == actual + + +def test_parse_supported_database_flag_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "flag": "abalone", + } + path = AlloyDBAdminClient.supported_database_flag_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_supported_database_flag_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AlloyDBAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AlloyDBAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AlloyDBAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AlloyDBAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AlloyDBAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AlloyDBAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = AlloyDBAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AlloyDBAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AlloyDBAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AlloyDBAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AlloyDBAdminTransport, "_prep_wrapped_messages" + ) as prep: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AlloyDBAdminTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AlloyDBAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
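+    # get_location / list_locations are standard mixins shared by every
+    # GAPIC client; they take raw protobuf messages from locations_pb2
+    # rather than proto-plus types, hence json_format.ParseDict above, e.g.:
+    #
+    #   request = locations_pb2.ListLocationsRequest(name="projects/sample1")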
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
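+    # CancelOperation has an empty response message, so the client surfaces it
+    # as ``None`` (the mocked body above is the empty JSON object "{}").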
+ assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
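+        # ``None`` mirrors the empty response of the DeleteOperation RPC.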
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
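+    # The routing value travels in the ``metadata`` kwarg of the mocked call.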
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
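+        # FakeUnaryUnaryCall wraps the message in an awaitable, mimicking a
+        # real async gRPC invocation.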
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = AlloyDBAdminClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = AlloyDBAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = AlloyDBAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = AlloyDBAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = AlloyDBAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = AlloyDBAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
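+        # Exiting the ``with client:`` block must trigger ``transport.close()``.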
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport), + (AlloyDBAdminAsyncClient, transports.AlloyDBAdminGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-apigee-connect/.OwlBot.yaml b/packages/google-cloud-apigee-connect/.OwlBot.yaml new file mode 100644 index 000000000000..46c7a7f0960e --- /dev/null +++ b/packages/google-cloud-apigee-connect/.OwlBot.yaml @@ -0,0 +1,23 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/apigeeconnect/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-apigee-connect/$1 + +begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/packages/google-cloud-apigee-connect/.coveragerc b/packages/google-cloud-apigee-connect/.coveragerc new file mode 100644 index 000000000000..1f5fd969d9de --- /dev/null +++ b/packages/google-cloud-apigee-connect/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/apigeeconnect/__init__.py + google/cloud/apigeeconnect/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-apigee-connect/.flake8 b/packages/google-cloud-apigee-connect/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-apigee-connect/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-apigee-connect/.gitignore b/packages/google-cloud-apigee-connect/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-apigee-connect/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-apigee-connect/.repo-metadata.json b/packages/google-cloud-apigee-connect/.repo-metadata.json new file mode 100644 index 000000000000..1f0d05cb266c --- /dev/null +++ b/packages/google-cloud-apigee-connect/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "apigeeconnect", + "name_pretty": "Apigee Connect", + "product_documentation": "https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect", + "client_documentation": "https://cloud.google.com/python/docs/reference/apigeeconnect/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-apigee-connect", + "api_id": "apigeeconnect.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "apigeeconnect", + "api_description": "allows the Apigee hybrid management plane to connect securely to the MART service in the runtime plane without requiring you to expose the MART endpoint on the internet." 
+} diff --git a/packages/google-cloud-apigee-connect/CHANGELOG.md b/packages/google-cloud-apigee-connect/CHANGELOG.md new file mode 100644 index 000000000000..6759d87c0754 --- /dev/null +++ b/packages/google-cloud-apigee-connect/CHANGELOG.md @@ -0,0 +1,228 @@ +# Changelog + +## [1.7.1](https://github.com/googleapis/python-apigee-connect/compare/v1.7.0...v1.7.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([514bdf3](https://github.com/googleapis/python-apigee-connect/commit/514bdf3795b7bba012236d357155b5d798f60df0)) + + +### Documentation + +* Add documentation for enums ([514bdf3](https://github.com/googleapis/python-apigee-connect/commit/514bdf3795b7bba012236d357155b5d798f60df0)) + +## [1.7.0](https://github.com/googleapis/python-apigee-connect/compare/v1.6.1...v1.7.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#168](https://github.com/googleapis/python-apigee-connect/issues/168)) ([a340710](https://github.com/googleapis/python-apigee-connect/commit/a34071088707b876d42bf687fb12b133724c0cc6)) + +## [1.6.1](https://github.com/googleapis/python-apigee-connect/compare/v1.6.0...v1.6.1) (2022-12-08) + + +### Bug Fixes + +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([15ed1e8](https://github.com/googleapis/python-apigee-connect/commit/15ed1e868a37c5818e137a0f95e8a48bed6d4cd9)) +* Drop usage of pkg_resources ([15ed1e8](https://github.com/googleapis/python-apigee-connect/commit/15ed1e868a37c5818e137a0f95e8a48bed6d4cd9)) +* Fix timeout default values ([15ed1e8](https://github.com/googleapis/python-apigee-connect/commit/15ed1e868a37c5818e137a0f95e8a48bed6d4cd9)) + + +### Documentation + +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([15ed1e8](https://github.com/googleapis/python-apigee-connect/commit/15ed1e868a37c5818e137a0f95e8a48bed6d4cd9)) + +## [1.6.0](https://github.com/googleapis/python-apigee-connect/compare/v1.5.0...v1.6.0) (2022-11-16) + + +### Features + +* Add typing to proto.Message based class attributes ([6ca9292](https://github.com/googleapis/python-apigee-connect/commit/6ca929223b74d3e2f26d6b34b3c937db8250a4e3)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([6ca9292](https://github.com/googleapis/python-apigee-connect/commit/6ca929223b74d3e2f26d6b34b3c937db8250a4e3)) + +## [1.5.0](https://github.com/googleapis/python-apigee-connect/compare/v1.4.3...v1.5.0) (2022-11-08) + + +### Features + +* add support for `google.cloud.apigeeconnect.__version__` ([ec1f84c](https://github.com/googleapis/python-apigee-connect/commit/ec1f84c1685df65a1805051bc5092f04f4cec87d)) + + +### Bug Fixes + +* Add dict typing for client_options ([ec1f84c](https://github.com/googleapis/python-apigee-connect/commit/ec1f84c1685df65a1805051bc5092f04f4cec87d)) +* **deps:** require google-api-core >=1.33.2 ([ec1f84c](https://github.com/googleapis/python-apigee-connect/commit/ec1f84c1685df65a1805051bc5092f04f4cec87d)) + +## [1.4.3](https://github.com/googleapis/python-apigee-connect/compare/v1.4.2...v1.4.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#151](https://github.com/googleapis/python-apigee-connect/issues/151)) ([baca117](https://github.com/googleapis/python-apigee-connect/commit/baca117237ee80e889366fa54ff1bd0fb54e2661)) + +## [1.4.2](https://github.com/googleapis/python-apigee-connect/compare/v1.4.1...v1.4.2) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 
([#148](https://github.com/googleapis/python-apigee-connect/issues/148)) ([43ff689](https://github.com/googleapis/python-apigee-connect/commit/43ff68925dbd3a8db86f1e2a035ee91f45948ae1)) + +## [1.4.1](https://github.com/googleapis/python-apigee-connect/compare/v1.4.0...v1.4.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#133](https://github.com/googleapis/python-apigee-connect/issues/133)) ([25c184a](https://github.com/googleapis/python-apigee-connect/commit/25c184aa094ac77ee92bf87d6c4160d2fd20b4a8)) +* **deps:** require proto-plus >= 1.22.0 ([25c184a](https://github.com/googleapis/python-apigee-connect/commit/25c184aa094ac77ee92bf87d6c4160d2fd20b4a8)) + +## [1.4.0](https://github.com/googleapis/python-apigee-connect/compare/v1.3.2...v1.4.0) (2022-07-17) + + +### Features + +* add audience parameter ([59bcc9c](https://github.com/googleapis/python-apigee-connect/commit/59bcc9ca7ceb1aeffb74c5989e2e47ad73a4b434)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#124](https://github.com/googleapis/python-apigee-connect/issues/124)) ([59bcc9c](https://github.com/googleapis/python-apigee-connect/commit/59bcc9ca7ceb1aeffb74c5989e2e47ad73a4b434)) +* require python 3.7+ ([#126](https://github.com/googleapis/python-apigee-connect/issues/126)) ([2df9df8](https://github.com/googleapis/python-apigee-connect/commit/2df9df8eef41cd34e3634e93b1f9aabc28b9973b)) + +## [1.3.2](https://github.com/googleapis/python-apigee-connect/compare/v1.3.1...v1.3.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#115](https://github.com/googleapis/python-apigee-connect/issues/115)) ([4abbe8b](https://github.com/googleapis/python-apigee-connect/commit/4abbe8b68e1a8bb9f32b6f37f8f9462da0179b56)) + + +### Documentation + +* fix changelog header to consistent size ([#116](https://github.com/googleapis/python-apigee-connect/issues/116)) ([de3022c](https://github.com/googleapis/python-apigee-connect/commit/de3022cfe32ff6cc796bd24fb1a6f9935599ad96)) + +## [1.3.1](https://github.com/googleapis/python-apigee-connect/compare/v1.3.0...v1.3.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#92](https://github.com/googleapis/python-apigee-connect/issues/92)) ([2cee45c](https://github.com/googleapis/python-apigee-connect/commit/2cee45c4aa1873e30a461586432f94ba1f5e04fe)) + +## [1.3.0](https://github.com/googleapis/python-apigee-connect/compare/v1.2.1...v1.3.0) (2022-02-26) + + +### Features + +* add api key support ([#79](https://github.com/googleapis/python-apigee-connect/issues/79)) ([ae944c1](https://github.com/googleapis/python-apigee-connect/commit/ae944c10b2f63a682cf2f196d2122ccbed2dac48)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([2ff0df1](https://github.com/googleapis/python-apigee-connect/commit/2ff0df184986fcaee52af77bfe7c26348394e0fd)) + + +### Documentation + +* add autogenerated code snippets ([86b45c2](https://github.com/googleapis/python-apigee-connect/commit/86b45c2379df54b274d192e5a46e6348e4f86005)) + +## [1.2.1](https://www.github.com/googleapis/python-apigee-connect/compare/v1.2.0...v1.2.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([4cd9ea7](https://www.github.com/googleapis/python-apigee-connect/commit/4cd9ea7bd45e5a3410e6b1bde0a0ab629f75530d)) +* **deps:** require google-api-core >= 1.28.0 ([4cd9ea7](https://www.github.com/googleapis/python-apigee-connect/commit/4cd9ea7bd45e5a3410e6b1bde0a0ab629f75530d)) + + +### 
Documentation + +* list oneofs in docstring ([4cd9ea7](https://www.github.com/googleapis/python-apigee-connect/commit/4cd9ea7bd45e5a3410e6b1bde0a0ab629f75530d)) + +## [1.2.0](https://www.github.com/googleapis/python-apigee-connect/compare/v1.1.0...v1.2.0) (2021-10-15) + + +### Features + +* add support for python 3.10 ([#56](https://www.github.com/googleapis/python-apigee-connect/issues/56)) ([f119149](https://www.github.com/googleapis/python-apigee-connect/commit/f119149b8343b0a8ae84dbe3d228f6b422d13b20)) + +## [1.1.0](https://www.github.com/googleapis/python-apigee-connect/compare/v1.0.2...v1.1.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#54](https://www.github.com/googleapis/python-apigee-connect/issues/54)) ([0dc1edd](https://www.github.com/googleapis/python-apigee-connect/commit/0dc1eddc5fae79b2516789587eb097190c8f1420)) + +## [1.0.2](https://www.github.com/googleapis/python-apigee-connect/compare/v1.0.1...v1.0.2) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([fb8b837](https://www.github.com/googleapis/python-apigee-connect/commit/fb8b837d64c9771d7af9d36688e00243794ea731)) + +## [1.0.1](https://www.github.com/googleapis/python-apigee-connect/compare/v1.0.0...v1.0.1) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([50db178](https://www.github.com/googleapis/python-apigee-connect/commit/50db178c9dec3b1214d537da0a9bd9088d9ebf43)) + + +## [1.0.0](https://www.github.com/googleapis/python-apigee-connect/compare/v0.2.2...v1.0.0) (2021-08-03) + + +### Features + +* bump release level to production/stable ([#28](https://www.github.com/googleapis/python-apigee-connect/issues/28)) ([65803db](https://www.github.com/googleapis/python-apigee-connect/commit/65803db4821b594124bfb88012ba4f954568a895)) + +## [0.2.2](https://www.github.com/googleapis/python-apigee-connect/compare/v0.2.1...v0.2.2) (2021-07-29) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#24](https://www.github.com/googleapis/python-apigee-connect/issues/24)) ([d02ffa1](https://www.github.com/googleapis/python-apigee-connect/commit/d02ffa1f4906da6ba8d479b18947e6d28ccb1987)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#20](https://www.github.com/googleapis/python-apigee-connect/issues/20)) ([6f37510](https://www.github.com/googleapis/python-apigee-connect/commit/6f37510de8b5bd00720335f1343354ee2c932d9e)) + + +### Miscellaneous Chores + +* release as 0.2.2 ([#25](https://www.github.com/googleapis/python-apigee-connect/issues/25)) ([f6660d3](https://www.github.com/googleapis/python-apigee-connect/commit/f6660d3d604c61b748abe59a51923b74b687d761)) + +## [0.2.1](https://www.github.com/googleapis/python-apigee-connect/compare/v0.2.0...v0.2.1) (2021-07-21) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#19](https://www.github.com/googleapis/python-apigee-connect/issues/19)) ([cfa46e7](https://www.github.com/googleapis/python-apigee-connect/commit/cfa46e7528595d57f396839ef167cb8ede29981b)) + +## [0.2.0](https://www.github.com/googleapis/python-apigee-connect/compare/v0.1.0...v0.2.0) (2021-07-14) + + +### Features + +* add always_use_jwt_access ([#11](https://www.github.com/googleapis/python-apigee-connect/issues/11)) ([bdd0f21](https://www.github.com/googleapis/python-apigee-connect/commit/bdd0f2109e34d100c285ab355f302326816f24c8)) + + +### Bug Fixes + +* disable always_use_jwt_access 
([f4cff83](https://www.github.com/googleapis/python-apigee-connect/commit/f4cff83baf1865477139bd14b02a12e47505150d)) +* disable always_use_jwt_access ([#15](https://www.github.com/googleapis/python-apigee-connect/issues/15)) ([f4cff83](https://www.github.com/googleapis/python-apigee-connect/commit/f4cff83baf1865477139bd14b02a12e47505150d)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-apigee-connect/issues/1127)) ([#6](https://www.github.com/googleapis/python-apigee-connect/issues/6)) ([a465d9a](https://www.github.com/googleapis/python-apigee-connect/commit/a465d9a37f4738f0f16e89b4c26a74366c0834fb)), closes [#1126](https://www.github.com/googleapis/python-apigee-connect/issues/1126) + +## 0.1.0 (2021-06-13) + + +### Features + +* generate v1 ([b52b2bf](https://www.github.com/googleapis/python-apigee-connect/commit/b52b2bf1f8456eb7fc815857d90384d2f596d23a)) diff --git a/packages/google-cloud-apigee-connect/CODE_OF_CONDUCT.md b/packages/google-cloud-apigee-connect/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-apigee-connect/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. 
Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-apigee-connect/CONTRIBUTING.rst b/packages/google-cloud-apigee-connect/CONTRIBUTING.rst new file mode 100644 index 000000000000..feb8224434d7 --- /dev/null +++ b/packages/google-cloud-apigee-connect/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. 
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.thea.codes/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
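+
+  For illustration only, here is a minimal, self-contained sketch of the
+  ``_call_fut`` convention, using ``os.path.splitext`` as a stand-in
+  function under test (real tests import the module they cover instead)::
+
+    import unittest
+
+    class Test_splitext(unittest.TestCase):
+        def _call_fut(self, *args, **kwargs):
+            # Resolve the function under test ("FUT") in exactly one
+            # place, so an import failure surfaces as a test failure.
+            from os.path import splitext
+
+            return splitext(*args, **kwargs)
+
+        def test_simple(self):
+            self.assertEqual(self._call_fut("pkg/mod.py"), ("pkg/mod", ".py"))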
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-<python-version> -- -k <name of test>
+
+
+  .. note::
+
+      System tests are only configured to run under Python.
+      For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-apigee-connect
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-apigee-connect/LICENSE b/packages/google-cloud-apigee-connect/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-apigee-connect/MANIFEST.in b/packages/google-cloud-apigee-connect/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-apigee-connect/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-apigee-connect/README.rst b/packages/google-cloud-apigee-connect/README.rst new file mode 100644 index 000000000000..1ebe5c83280a --- /dev/null +++ b/packages/google-cloud-apigee-connect/README.rst @@ -0,0 +1,107 @@ +Python Client for Apigee Connect +================================ + +|stable| |pypi| |versions| + +`Apigee Connect`_: allows the Apigee hybrid management plane to connect securely to the MART service in the runtime plane without requiring you to expose the MART endpoint on the internet. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-apigee-connect.svg + :target: https://pypi.org/project/google-cloud-apigee-connect/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-apigee-connect.svg + :target: https://pypi.org/project/google-cloud-apigee-connect/ +.. _Apigee Connect: https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/apigeeconnect/latest +.. _Product Documentation: https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Apigee Connect.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Apigee Connect.: https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-apigee-connect
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-apigee-connect
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Apigee Connect
+  to see other available methods on the client.
+- Read the `Apigee Connect Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Apigee Connect Product documentation: https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
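+
+Example Usage
+~~~~~~~~~~~~~
+
+A minimal, illustrative sketch of listing connections with the generated
+client (the project and endpoint names below are placeholders, and the
+resource path format is an assumption based on the v1 API surface; see the
+`Client Library Documentation`_ for the authoritative reference)::
+
+    from google.cloud import apigeeconnect_v1
+
+    client = apigeeconnect_v1.ConnectionServiceClient()
+    # Endpoints are addressed by a resource path such as
+    # projects/{project}/endpoints/{endpoint}.
+    parent = client.endpoint_path("my-project", "my-endpoint")
+    for connection in client.list_connections(parent=parent):
+        print(connection)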
diff --git a/packages/google-cloud-apigee-connect/SECURITY.md b/packages/google-cloud-apigee-connect/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/packages/google-cloud-apigee-connect/docs/CHANGELOG.md b/packages/google-cloud-apigee-connect/docs/CHANGELOG.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/docs/CHANGELOG.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-cloud-apigee-connect/docs/README.rst b/packages/google-cloud-apigee-connect/docs/README.rst
new file mode 120000
index 000000000000..89a0106941ff
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/docs/README.rst
@@ -0,0 +1 @@
+../README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-apigee-connect/docs/_static/custom.css b/packages/google-cloud-apigee-connect/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/packages/google-cloud-apigee-connect/docs/_templates/layout.html b/packages/google-cloud-apigee-connect/docs/_templates/layout.html
new file mode 100644
index 000000000000..6316a537f72b
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+        {%- if render_sidebar %}
+          <div class="bodywrapper">
+        {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+        {%- if render_sidebar %}
+          </div>
+        {%- endif %}
+      </div>
+      {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/connection_service.rst b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/connection_service.rst new file mode 100644 index 000000000000..b4bcba915d8e --- /dev/null +++ b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/connection_service.rst @@ -0,0 +1,10 @@ +ConnectionService +----------------------------------- + +.. automodule:: google.cloud.apigeeconnect_v1.services.connection_service + :members: + :inherited-members: + +.. automodule:: google.cloud.apigeeconnect_v1.services.connection_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/services.rst b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/services.rst new file mode 100644 index 000000000000..bf624911eda2 --- /dev/null +++ b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Apigeeconnect v1 API +============================================== +.. toctree:: + :maxdepth: 2 + + connection_service + tether diff --git a/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/tether.rst b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/tether.rst new file mode 100644 index 000000000000..dce7abab2d92 --- /dev/null +++ b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/tether.rst @@ -0,0 +1,6 @@ +Tether +------------------------ + +.. automodule:: google.cloud.apigeeconnect_v1.services.tether + :members: + :inherited-members: diff --git a/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/types.rst b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/types.rst new file mode 100644 index 000000000000..13f9b4acf2e9 --- /dev/null +++ b/packages/google-cloud-apigee-connect/docs/apigeeconnect_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Apigeeconnect v1 API +=========================================== + +.. automodule:: google.cloud.apigeeconnect_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-apigee-connect/docs/conf.py b/packages/google-cloud-apigee-connect/docs/conf.py new file mode 100644 index 000000000000..ac685c3d9206 --- /dev/null +++ b/packages/google-cloud-apigee-connect/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-apigee-connect documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. 
If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-apigee-connect" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for google-cloud-apigee-connect",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-apigee-connect-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-apigee-connect.tex", + "google-cloud-apigee-connect Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-apigee-connect", + "google-cloud-apigee-connect Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-apigee-connect", + "google-cloud-apigee-connect Documentation", + author, + "google-cloud-apigee-connect", + "google-cloud-apigee-connect Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-apigee-connect/docs/index.rst b/packages/google-cloud-apigee-connect/docs/index.rst new file mode 100644 index 000000000000..a91704778078 --- /dev/null +++ b/packages/google-cloud-apigee-connect/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + apigeeconnect_v1/services + apigeeconnect_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-apigee-connect`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-apigee-connect/docs/multiprocessing.rst b/packages/google-cloud-apigee-connect/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-apigee-connect/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/__init__.py new file mode 100644 index 000000000000..58946d8f1ed0 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/__init__.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.apigeeconnect import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.apigeeconnect_v1.services.connection_service.async_client import ( + ConnectionServiceAsyncClient, +) +from google.cloud.apigeeconnect_v1.services.connection_service.client import ( + ConnectionServiceClient, +) +from google.cloud.apigeeconnect_v1.services.tether.async_client import TetherAsyncClient +from google.cloud.apigeeconnect_v1.services.tether.client import TetherClient +from google.cloud.apigeeconnect_v1.types.connection import ( + Cluster, + Connection, + ListConnectionsRequest, + ListConnectionsResponse, +) +from google.cloud.apigeeconnect_v1.types.tether import ( + Action, + EgressRequest, + EgressResponse, + Header, + HttpRequest, + HttpResponse, + Payload, + Scheme, + StreamInfo, + TetherEndpoint, + Url, +) + +__all__ = ( + "ConnectionServiceClient", + "ConnectionServiceAsyncClient", + "TetherClient", + "TetherAsyncClient", + "Cluster", + "Connection", + "ListConnectionsRequest", + "ListConnectionsResponse", + "EgressRequest", + "EgressResponse", + "Header", + "HttpRequest", + "HttpResponse", + "Payload", + "StreamInfo", + "Url", + "Action", + "Scheme", + "TetherEndpoint", +) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py new file mode 100644 index 000000000000..84856f09232b --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.7.1" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/py.typed b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/py.typed new file mode 100644 index 000000000000..4d0fa2ef3dd2 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-apigee-connect package uses inline types. diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/__init__.py new file mode 100644 index 000000000000..f5e216b8987d --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/__init__.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.apigeeconnect_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.connection_service import ( + ConnectionServiceAsyncClient, + ConnectionServiceClient, +) +from .services.tether import TetherAsyncClient, TetherClient +from .types.connection import ( + Cluster, + Connection, + ListConnectionsRequest, + ListConnectionsResponse, +) +from .types.tether import ( + Action, + EgressRequest, + EgressResponse, + Header, + HttpRequest, + HttpResponse, + Payload, + Scheme, + StreamInfo, + TetherEndpoint, + Url, +) + +__all__ = ( + "ConnectionServiceAsyncClient", + "TetherAsyncClient", + "Action", + "Cluster", + "Connection", + "ConnectionServiceClient", + "EgressRequest", + "EgressResponse", + "Header", + "HttpRequest", + "HttpResponse", + "ListConnectionsRequest", + "ListConnectionsResponse", + "Payload", + "Scheme", + "StreamInfo", + "TetherClient", + "TetherEndpoint", + "Url", +) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_metadata.json b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_metadata.json new file mode 100644 index 000000000000..15811317ddfa --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_metadata.json @@ -0,0 +1,57 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.apigeeconnect_v1", + "protoPackage": "google.cloud.apigeeconnect.v1", + "schema": "1.0", + "services": { + "ConnectionService": { + "clients": { + "grpc": { + "libraryClient": "ConnectionServiceClient", + "rpcs": { + "ListConnections": { + "methods": [ + "list_connections" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConnectionServiceAsyncClient", + "rpcs": { + "ListConnections": { + "methods": [ + "list_connections" + ] + } + } + } + } + }, + "Tether": { + "clients": { + "grpc": { + "libraryClient": "TetherClient", + "rpcs": { + "Egress": { + "methods": [ + "egress" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TetherAsyncClient", + "rpcs": { + "Egress": { + "methods": [ + "egress" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py new file mode 100644 index 000000000000..84856f09232b --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "1.7.1" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/py.typed b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/py.typed new file mode 100644 index 000000000000..4d0fa2ef3dd2 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-apigee-connect package uses inline types. diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/__init__.py new file mode 100644 index 000000000000..34709ef0fd7b --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ConnectionServiceAsyncClient +from .client import ConnectionServiceClient + +__all__ = ( + "ConnectionServiceClient", + "ConnectionServiceAsyncClient", +) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py new file mode 100644 index 000000000000..4b0090fcc2d1 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/async_client.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apigeeconnect_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.apigeeconnect_v1.services.connection_service import pagers +from google.cloud.apigeeconnect_v1.types import connection + +from .client import ConnectionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport +from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport + + +class ConnectionServiceAsyncClient: + """Service Interface for the Apigee Connect connection + management APIs. + """ + + _client: ConnectionServiceClient + + DEFAULT_ENDPOINT = ConnectionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT + + endpoint_path = staticmethod(ConnectionServiceClient.endpoint_path) + parse_endpoint_path = staticmethod(ConnectionServiceClient.parse_endpoint_path) + common_billing_account_path = staticmethod( + ConnectionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ConnectionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ConnectionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ConnectionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ConnectionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ConnectionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ConnectionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ConnectionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ConnectionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ConnectionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceAsyncClient: The constructed client. 
+ """ + return ConnectionServiceClient.from_service_account_info.__func__(ConnectionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceAsyncClient: The constructed client. + """ + return ConnectionServiceClient.from_service_account_file.__func__(ConnectionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ConnectionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ConnectionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ConnectionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ConnectionServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the connection service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConnectionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ConnectionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_connections( + self, + request: Optional[Union[connection.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsAsyncPager: + r"""Lists connections that are currently active for the + given Apigee Connect endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apigeeconnect_v1 + + async def sample_list_connections(): + # Create a client + client = apigeeconnect_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = apigeeconnect_v1.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.apigeeconnect_v1.types.ListConnectionsRequest, dict]]): + The request object. The request for + [ListConnections][Management.ListConnections]. + parent (:class:`str`): + Required. Parent name of the form: + ``projects/{project_number or project_id}/endpoints/{endpoint}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apigeeconnect_v1.services.connection_service.pagers.ListConnectionsAsyncPager: + The response for + [ListConnections][Management.ListConnections]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = connection.ListConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_connections, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConnectionServiceAsyncClient",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py new file mode 100644 index 000000000000..351de7f7cfd9 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py @@ -0,0 +1,576 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
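(Editor's note: before the synchronous client implementation that follows, a minimal usage sketch of the surface it exposes. ``my-project`` and ``my-endpoint`` are hypothetical placeholders, and Application Default Credentials are assumed to be configured:)

.. code-block:: python

    from google.cloud import apigeeconnect_v1

    client = apigeeconnect_v1.ConnectionServiceClient()

    # endpoint_path() builds the "projects/{project}/endpoints/{endpoint}"
    # resource name that ListConnections expects as its parent.
    parent = client.endpoint_path(project="my-project", endpoint="my-endpoint")

    # list_connections returns a pager; iterating it fetches further pages
    # transparently via next_page_token.
    for conn in client.list_connections(parent=parent):
        print(conn)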
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.apigeeconnect_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.cloud.apigeeconnect_v1.services.connection_service import pagers
+from google.cloud.apigeeconnect_v1.types import connection
+
+from .transports.base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport
+from .transports.grpc import ConnectionServiceGrpcTransport
+from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
+
+
+class ConnectionServiceClientMeta(type):
+    """Metaclass for the ConnectionService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[ConnectionServiceTransport]]
+    _transport_registry["grpc"] = ConnectionServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[ConnectionServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ConnectionServiceClient(metaclass=ConnectionServiceClientMeta):
+    """Service Interface for the Apigee Connect connection
+    management APIs.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "apigeeconnect.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConnectionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ConnectionServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def endpoint_path( + project: str, + endpoint: str, + ) -> str: + """Returns a fully-qualified endpoint string.""" + return "projects/{project}/endpoints/{endpoint}".format( + project=project, + endpoint=endpoint, + ) + + @staticmethod + def parse_endpoint_path(path: str) -> Dict[str, str]: + """Parses a endpoint path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/endpoints/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConnectionServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the connection service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ConnectionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConnectionServiceTransport): + # transport is a ConnectionServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_connections( + self, + request: Optional[Union[connection.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsPager: + r"""Lists connections that are currently active for the + given Apigee Connect endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apigeeconnect_v1 + + def sample_list_connections(): + # Create a client + client = apigeeconnect_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = apigeeconnect_v1.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.apigeeconnect_v1.types.ListConnectionsRequest, dict]): + The request object. The request for + [ListConnections][Management.ListConnections]. + parent (str): + Required. Parent name of the form: + ``projects/{project_number or project_id}/endpoints/{endpoint}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.apigeeconnect_v1.services.connection_service.pagers.ListConnectionsPager: + The response for + [ListConnections][Management.ListConnections]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a connection.ListConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, connection.ListConnectionsRequest): + request = connection.ListConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ConnectionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConnectionServiceClient",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/pagers.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/pagers.py new file mode 100644 index 000000000000..7c7e806a99a3 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.apigeeconnect_v1.types import connection + + +class ListConnectionsPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apigeeconnect_v1.types.ListConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apigeeconnect_v1.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., connection.ListConnectionsResponse], + request: connection.ListConnectionsRequest, + response: connection.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apigeeconnect_v1.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.apigeeconnect_v1.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = connection.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[connection.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[connection.Connection]: + for page in self.pages: + yield from page.connections + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConnectionsAsyncPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.apigeeconnect_v1.types.ListConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.apigeeconnect_v1.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[connection.ListConnectionsResponse]], + request: connection.ListConnectionsRequest, + response: connection.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.apigeeconnect_v1.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.apigeeconnect_v1.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = connection.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[connection.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[connection.Connection]: + async def async_generator(): + async for page in self.pages: + for response in page.connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/__init__.py new file mode 100644 index 000000000000..b581fa9d43c2 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConnectionServiceTransport +from .grpc import ConnectionServiceGrpcTransport +from .grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ConnectionServiceTransport]] +_transport_registry["grpc"] = ConnectionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport + +__all__ = ( + "ConnectionServiceTransport", + "ConnectionServiceGrpcTransport", + "ConnectionServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/base.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/base.py new file mode 100644 index 000000000000..29bb9de07e4b --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/base.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apigeeconnect_v1 import gapic_version as package_version +from google.cloud.apigeeconnect_v1.types import connection + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ConnectionServiceTransport(abc.ABC): + """Abstract transport class for ConnectionService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apigeeconnect.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_connections: gapic_v1.method.wrap_method( + self.list_connections, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_connections( + self, + ) -> Callable[ + [connection.ListConnectionsRequest], + Union[ + connection.ListConnectionsResponse, + Awaitable[connection.ListConnectionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ConnectionServiceTransport",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/grpc.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/grpc.py new file mode 100644 index 000000000000..58b45ddd5c97 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/grpc.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.apigeeconnect_v1.types import connection + +from .base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport + + +class ConnectionServiceGrpcTransport(ConnectionServiceTransport): + """gRPC backend transport for ConnectionService. + + Service Interface for the Apigee Connect connection + management APIs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "apigeeconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apigeeconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_connections( + self, + ) -> Callable[ + [connection.ListConnectionsRequest], connection.ListConnectionsResponse + ]: + r"""Return a callable for the list connections method over gRPC. + + Lists connections that are currently active for the + given Apigee Connect endpoint. + + Returns: + Callable[[~.ListConnectionsRequest], + ~.ListConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_connections" not in self._stubs: + self._stubs["list_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.apigeeconnect.v1.ConnectionService/ListConnections", + request_serializer=connection.ListConnectionsRequest.serialize, + response_deserializer=connection.ListConnectionsResponse.deserialize, + ) + return self._stubs["list_connections"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ConnectionServiceGrpcTransport",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/grpc_asyncio.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b3abf0bc7d3b --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/transports/grpc_asyncio.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.apigeeconnect_v1.types import connection + +from .base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport +from .grpc import ConnectionServiceGrpcTransport + + +class ConnectionServiceGrpcAsyncIOTransport(ConnectionServiceTransport): + """gRPC AsyncIO backend transport for ConnectionService. + + Service Interface for the Apigee Connect connection + management APIs. 
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "apigeeconnect.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "apigeeconnect.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[aio.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_connections( + self, + ) -> Callable[ + [connection.ListConnectionsRequest], + Awaitable[connection.ListConnectionsResponse], + ]: + r"""Return a callable for the list connections method over gRPC. + + Lists connections that are currently active for the + given Apigee Connect endpoint. + + Returns: + Callable[[~.ListConnectionsRequest], + Awaitable[~.ListConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_connections" not in self._stubs: + self._stubs["list_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.apigeeconnect.v1.ConnectionService/ListConnections", + request_serializer=connection.ListConnectionsRequest.serialize, + response_deserializer=connection.ListConnectionsResponse.deserialize, + ) + return self._stubs["list_connections"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ConnectionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/__init__.py new file mode 100644 index 000000000000..dcb52557ec32 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TetherAsyncClient +from .client import TetherClient + +__all__ = ( + "TetherClient", + "TetherAsyncClient", +) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py new file mode 100644 index 000000000000..4f0e531bdfce --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/async_client.py @@ -0,0 +1,312 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import functools
+import re
+from typing import (
+ AsyncIterable,
+ AsyncIterator,
+ Awaitable,
+ Dict,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+)
+
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core.client_options import ClientOptions
+from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.apigeeconnect_v1 import gapic_version as package_version
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object] # type: ignore
+
+from google.protobuf import duration_pb2 # type: ignore
+
+from google.cloud.apigeeconnect_v1.types import tether
+
+from .client import TetherClient
+from .transports.base import DEFAULT_CLIENT_INFO, TetherTransport
+from .transports.grpc_asyncio import TetherGrpcAsyncIOTransport
+
+
+class TetherAsyncClient:
+ """Tether provides a way for the control plane to send HTTP API
+ requests to services in data planes that run in a remote
+ datacenter without requiring customers to open firewalls on
+ their runtime plane.
+ """
+
+ _client: TetherClient
+
+ DEFAULT_ENDPOINT = TetherClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = TetherClient.DEFAULT_MTLS_ENDPOINT
+
+ common_billing_account_path = staticmethod(TetherClient.common_billing_account_path)
+ parse_common_billing_account_path = staticmethod(
+ TetherClient.parse_common_billing_account_path
+ )
+ common_folder_path = staticmethod(TetherClient.common_folder_path)
+ parse_common_folder_path = staticmethod(TetherClient.parse_common_folder_path)
+ common_organization_path = staticmethod(TetherClient.common_organization_path)
+ parse_common_organization_path = staticmethod(
+ TetherClient.parse_common_organization_path
+ )
+ common_project_path = staticmethod(TetherClient.common_project_path)
+ parse_common_project_path = staticmethod(TetherClient.parse_common_project_path)
+ common_location_path = staticmethod(TetherClient.common_location_path)
+ parse_common_location_path = staticmethod(TetherClient.parse_common_location_path)
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TetherAsyncClient: The constructed client.
+ """
+ return TetherClient.from_service_account_info.__func__(TetherAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TetherAsyncClient: The constructed client.
+ """
+ return TetherClient.from_service_account_file.__func__(TetherAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ return TetherClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+ @property
+ def transport(self) -> TetherTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ TetherTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(TetherClient).get_transport_class, type(TetherClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, TetherTransport] = "grpc_asyncio",
+ client_options: Optional[ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the tether client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.TetherTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value).
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TetherClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + def egress( + self, + requests: Optional[AsyncIterator[tether.EgressResponse]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[tether.EgressRequest]]: + r"""Egress streams egress requests and responses. + Logically, this is not actually a streaming request, but + uses streaming as a mechanism to flip the client-server + relationship of gRPC so that the server can act as a + client. + The listener, the RPC server, accepts connections from + the dialer, the RPC client. + The listener streams http requests and the dialer + streams http responses. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apigeeconnect_v1 + + async def sample_egress(): + # Create a client + client = apigeeconnect_v1.TetherAsyncClient() + + # Initialize request argument(s) + request = apigeeconnect_v1.EgressResponse( + ) + + # This method expects an iterator which contains + # 'apigeeconnect_v1.EgressResponse' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.egress(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.apigeeconnect_v1.types.EgressResponse`]): + The request object AsyncIterator. gRPC response payload for tether. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.apigeeconnect_v1.types.EgressRequest]: + gRPC request payload for tether. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.egress, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
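+ # ``response`` is awaitable: callers await it to obtain the stream
+ # and then consume EgressRequest messages with ``async for``, as in
+ # the sample above.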
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TetherAsyncClient",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py new file mode 100644 index 000000000000..80e6137eca61 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py @@ -0,0 +1,529 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apigeeconnect_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import duration_pb2 # type: ignore + +from google.cloud.apigeeconnect_v1.types import tether + +from .transports.base import DEFAULT_CLIENT_INFO, TetherTransport +from .transports.grpc import TetherGrpcTransport +from .transports.grpc_asyncio import TetherGrpcAsyncIOTransport + + +class TetherClientMeta(type): + """Metaclass for the Tether client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TetherTransport]] + _transport_registry["grpc"] = TetherGrpcTransport + _transport_registry["grpc_asyncio"] = TetherGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TetherTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class TetherClient(metaclass=TetherClientMeta):
+ """Tether provides a way for the control plane to send HTTP API
+ requests to services in data planes that run in a remote
+ datacenter without requiring customers to open firewalls on
+ their runtime plane.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "apigeeconnect.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TetherClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TetherClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> TetherTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ TetherTransport: The transport used by the client
+ instance.
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TetherTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the tether client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TetherTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TetherTransport): + # transport is a TetherTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def egress( + self, + requests: Optional[Iterator[tether.EgressResponse]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[tether.EgressRequest]: + r"""Egress streams egress requests and responses. + Logically, this is not actually a streaming request, but + uses streaming as a mechanism to flip the client-server + relationship of gRPC so that the server can act as a + client. + The listener, the RPC server, accepts connections from + the dialer, the RPC client. + The listener streams http requests and the dialer + streams http responses. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import apigeeconnect_v1 + + def sample_egress(): + # Create a client + client = apigeeconnect_v1.TetherClient() + + # Initialize request argument(s) + request = apigeeconnect_v1.EgressResponse( + ) + + # This method expects an iterator which contains + # 'apigeeconnect_v1.EgressResponse' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.egress(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.apigeeconnect_v1.types.EgressResponse]): + The request object iterator. gRPC response payload for tether. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.apigeeconnect_v1.types.EgressRequest]: + gRPC request payload for tether. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.egress] + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TetherClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TetherClient",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/__init__.py new file mode 100644 index 000000000000..a9825a8815c8 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TetherTransport +from .grpc import TetherGrpcTransport +from .grpc_asyncio import TetherGrpcAsyncIOTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[TetherTransport]] +_transport_registry["grpc"] = TetherGrpcTransport +_transport_registry["grpc_asyncio"] = TetherGrpcAsyncIOTransport + +__all__ = ( + "TetherTransport", + "TetherGrpcTransport", + "TetherGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/base.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/base.py new file mode 100644 index 000000000000..9e280d198a46 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/base.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.apigeeconnect_v1 import gapic_version as package_version +from google.cloud.apigeeconnect_v1.types import tether + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TetherTransport(abc.ABC): + """Abstract transport class for Tether.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "apigeeconnect.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ """
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ # Don't apply the audience if the credentials file was passed from the user.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(
+ api_audience if api_audience else host
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.egress: gapic_v1.method.wrap_method(
+ self.egress,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
+ @property
+ def egress(
+ self,
+ ) -> Callable[
+ [tether.EgressResponse],
+ Union[tether.EgressRequest, Awaitable[tether.EgressRequest]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def kind(self) -> str:
+ raise NotImplementedError()
+
+
+__all__ = ("TetherTransport",)
diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/grpc.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/grpc.py
new file mode 100644
index 000000000000..0236d0dac06a
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/grpc.py
@@ -0,0 +1,274 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
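To see how the registry and the abstract transport fit together, a hedged sketch (not from this diff; it assumes application default credentials are discoverable in the environment):

.. code-block:: python

    from google.cloud import apigeeconnect_v1

    # "grpc" and "grpc_asyncio" are the names registered on TetherClientMeta;
    # passing one selects the matching TetherTransport subclass, which then
    # performs the channel setup shown in the concrete transports below.
    client = apigeeconnect_v1.TetherClient(transport="grpc")
    print(type(client.transport).__name__)  # TetherGrpcTransport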
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+import grpc # type: ignore
+
+from google.cloud.apigeeconnect_v1.types import tether
+
+from .base import DEFAULT_CLIENT_INFO, TetherTransport
+
+
+class TetherGrpcTransport(TetherTransport):
+ """gRPC backend transport for Tether.
+
+ Tether provides a way for the control plane to send HTTP API
+ requests to services in data planes that run in a remote
+ datacenter without requiring customers to open firewalls on
+ their runtime plane.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "apigeeconnect.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[grpc.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel.
It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "apigeeconnect.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service."""
+ return self._grpc_channel
+
+ @property
+ def egress(self) -> Callable[[tether.EgressResponse], tether.EgressRequest]:
+ r"""Return a callable for the egress method over gRPC.
+
+ Egress streams egress requests and responses.
+ Logically, this is not actually a streaming request, but
+ uses streaming as a mechanism to flip the client-server
+ relationship of gRPC so that the server can act as a
+ client.
+ The listener, the RPC server, accepts connections from
+ the dialer, the RPC client.
+ The listener streams http requests and the dialer
+ streams http responses.
+
+ Returns:
+ Callable[[~.EgressResponse],
+ ~.EgressRequest]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "egress" not in self._stubs:
+ self._stubs["egress"] = self.grpc_channel.stream_stream(
+ "/google.cloud.apigeeconnect.v1.Tether/Egress",
+ request_serializer=tether.EgressResponse.serialize,
+ response_deserializer=tether.EgressRequest.deserialize,
+ )
+ return self._stubs["egress"]
+
+ def close(self):
+ self.grpc_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc"
+
+
+__all__ = ("TetherGrpcTransport",)
diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/grpc_asyncio.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..7cc5780b7b16
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/transports/grpc_asyncio.py
@@ -0,0 +1,275 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
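A sketch of driving the ``egress`` stub above at the transport level rather than through ``TetherClient`` (not from this diff; it assumes application default credentials, and ordinarily the client's wrapped method is preferred):

.. code-block:: python

    from google.cloud.apigeeconnect_v1.services.tether.transports import (
        TetherGrpcTransport,
    )
    from google.cloud.apigeeconnect_v1.types import tether

    transport = TetherGrpcTransport()

    def responses():
        # The dialer sends EgressResponse payloads up the stream...
        yield tether.EgressResponse()

    # ...and receives EgressRequest payloads back: the role reversal the
    # docstring above describes.
    for egress_request in transport.egress(responses()):
        print(egress_request)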
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings

+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore

+from google.cloud.apigeeconnect_v1.types import tether

+from .base import DEFAULT_CLIENT_INFO, TetherTransport
+from .grpc import TetherGrpcTransport


+class TetherGrpcAsyncIOTransport(TetherTransport):
+    """gRPC AsyncIO backend transport for Tether.
+
+    Tether provides a way for the control plane to send HTTP API
+    requests to services in data planes that run in a remote
+    datacenter without requiring customers to open firewalls on
+    their runtime plane.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "apigeeconnect.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
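+
+        For illustration only (an editorial sketch, not generated text; the
+        quota project ID is made up)::
+
+            channel = TetherGrpcAsyncIOTransport.create_channel(
+                "apigeeconnect.googleapis.com",
+                quota_project_id="my-project",
+            )
+            transport = TetherGrpcAsyncIOTransport(channel=channel)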
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "apigeeconnect.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
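+
+        For illustration only (an editorial sketch; the file paths are made
+        up), a mutual TLS channel can be configured via
+        ``client_cert_source_for_mtls``::
+
+            def cert_source():
+                # Hypothetical loader: returns PEM cert and key bytes.
+                with open("cert.pem", "rb") as c, open("key.pem", "rb") as k:
+                    return c.read(), k.read()
+
+            transport = TetherGrpcAsyncIOTransport(
+                client_cert_source_for_mtls=cert_source,
+            )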
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def egress( + self, + ) -> Callable[[tether.EgressResponse], Awaitable[tether.EgressRequest]]: + r"""Return a callable for the egress method over gRPC. + + Egress streams egress requests and responses. + Logically, this is not actually a streaming request, but + uses streaming as a mechanism to flip the client-server + relationship of gRPC so that the server can act as a + client. + The listener, the RPC server, accepts connections from + the dialer, the RPC client. + The listener streams http requests and the dialer + streams http responses. + + Returns: + Callable[[~.EgressResponse], + Awaitable[~.EgressRequest]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
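+        # Editorial note: the stub is created once and cached in
+        # ``self._stubs``; the returned callable is a stream-stream stub that
+        # consumes ``EgressResponse`` messages and yields ``EgressRequest``
+        # messages, matching the reversed client-server roles described
+        # above.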
+ if "egress" not in self._stubs: + self._stubs["egress"] = self.grpc_channel.stream_stream( + "/google.cloud.apigeeconnect.v1.Tether/Egress", + request_serializer=tether.EgressResponse.serialize, + response_deserializer=tether.EgressRequest.deserialize, + ) + return self._stubs["egress"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TetherGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/__init__.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/__init__.py new file mode 100644 index 000000000000..7f12fbd52f0d --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/__init__.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .connection import ( + Cluster, + Connection, + ListConnectionsRequest, + ListConnectionsResponse, +) +from .tether import ( + Action, + EgressRequest, + EgressResponse, + Header, + HttpRequest, + HttpResponse, + Payload, + Scheme, + StreamInfo, + TetherEndpoint, + Url, +) + +__all__ = ( + "Cluster", + "Connection", + "ListConnectionsRequest", + "ListConnectionsResponse", + "EgressRequest", + "EgressResponse", + "Header", + "HttpRequest", + "HttpResponse", + "Payload", + "StreamInfo", + "Url", + "Action", + "Scheme", + "TetherEndpoint", +) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/connection.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/connection.py new file mode 100644 index 000000000000..6251dc494508 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/connection.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.apigeeconnect.v1", + manifest={ + "ListConnectionsRequest", + "ListConnectionsResponse", + "Connection", + "Cluster", + }, +) + + +class ListConnectionsRequest(proto.Message): + r"""The request for [ListConnections][Management.ListConnections]. + + Attributes: + parent (str): + Required. Parent name of the form: + ``projects/{project_number or project_id}/endpoints/{endpoint}``. + page_size (int): + The maximum number of connections to return. 
+ The service may return fewer than this value. If + unspecified, at most 100 connections will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListConnections`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListConnections`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListConnectionsResponse(proto.Message): + r"""The response for [ListConnections][Management.ListConnections]. + + Attributes: + connections (MutableSequence[google.cloud.apigeeconnect_v1.types.Connection]): + A list of clients. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + connections: MutableSequence["Connection"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Connection", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Connection(proto.Message): + r""" + + Attributes: + endpoint (str): + The endpoint that the connection is made against. Format: + ``projects/{project_number}/endpoints/{endpoint}`` + cluster (google.cloud.apigeeconnect_v1.types.Cluster): + Cluster information. + stream_count (int): + The count of streams. + """ + + endpoint: str = proto.Field( + proto.STRING, + number=1, + ) + cluster: "Cluster" = proto.Field( + proto.MESSAGE, + number=2, + message="Cluster", + ) + stream_count: int = proto.Field( + proto.INT32, + number=3, + ) + + +class Cluster(proto.Message): + r""" + + Attributes: + name (str): + The name of the cluster. + region (str): + The region of the cluster. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/tether.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/tether.py new file mode 100644 index 000000000000..859958f89868 --- /dev/null +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/types/tether.py @@ -0,0 +1,395 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.apigeeconnect.v1", + manifest={ + "Action", + "TetherEndpoint", + "Scheme", + "EgressRequest", + "Payload", + "StreamInfo", + "EgressResponse", + "HttpRequest", + "Url", + "Header", + "HttpResponse", + }, +) + + +class Action(proto.Enum): + r"""The action taken by agent. + + Values: + ACTION_UNSPECIFIED (0): + Unspecified Action. + OPEN_NEW_STREAM (1): + Indicates that agent should open a new + stream. + """ + ACTION_UNSPECIFIED = 0 + OPEN_NEW_STREAM = 1 + + +class TetherEndpoint(proto.Enum): + r"""Endpoint indicates where the messages will be delivered. + + Values: + TETHER_ENDPOINT_UNSPECIFIED (0): + Unspecified tether endpoint. + APIGEE_MART (1): + Apigee MART endpoint. + APIGEE_RUNTIME (2): + Apigee Runtime endpoint. + APIGEE_MINT_RATING (3): + Apigee Mint Rating endpoint. + """ + TETHER_ENDPOINT_UNSPECIFIED = 0 + APIGEE_MART = 1 + APIGEE_RUNTIME = 2 + APIGEE_MINT_RATING = 3 + + +class Scheme(proto.Enum): + r"""HTTP Scheme. + + Values: + SCHEME_UNSPECIFIED (0): + Unspecified scheme. + HTTPS (1): + HTTPS protocol. + """ + SCHEME_UNSPECIFIED = 0 + HTTPS = 1 + + +class EgressRequest(proto.Message): + r"""gRPC request payload for tether. + + Attributes: + id (str): + Unique identifier for the request. + payload (google.cloud.apigeeconnect_v1.types.Payload): + Actual payload to send to agent. + endpoint (google.cloud.apigeeconnect_v1.types.TetherEndpoint): + Tether Endpoint. + project (str): + GCP Project. Format: ``projects/{project_number}``. + trace_id (str): + Unique identifier for clients to trace their + request/response. + timeout (google.protobuf.duration_pb2.Duration): + Timeout for the HTTP request. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + payload: "Payload" = proto.Field( + proto.MESSAGE, + number=2, + message="Payload", + ) + endpoint: "TetherEndpoint" = proto.Field( + proto.ENUM, + number=3, + enum="TetherEndpoint", + ) + project: str = proto.Field( + proto.STRING, + number=4, + ) + trace_id: str = proto.Field( + proto.STRING, + number=5, + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + + +class Payload(proto.Message): + r"""Payload for EgressRequest. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + http_request (google.cloud.apigeeconnect_v1.types.HttpRequest): + The HttpRequest proto. + + This field is a member of `oneof`_ ``kind``. + stream_info (google.cloud.apigeeconnect_v1.types.StreamInfo): + The information of stream. + + This field is a member of `oneof`_ ``kind``. + action (google.cloud.apigeeconnect_v1.types.Action): + The action taken by agent. + + This field is a member of `oneof`_ ``kind``. 
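+
+    For illustration only (an editorial sketch; the identifier is made up),
+    setting one member of the ``kind`` oneof clears the others::
+
+        payload = Payload(stream_info=StreamInfo(id="stream-1"))
+        payload.action = Action.OPEN_NEW_STREAM  # ``stream_info`` is cleared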
+ """ + + http_request: "HttpRequest" = proto.Field( + proto.MESSAGE, + number=1, + oneof="kind", + message="HttpRequest", + ) + stream_info: "StreamInfo" = proto.Field( + proto.MESSAGE, + number=2, + oneof="kind", + message="StreamInfo", + ) + action: "Action" = proto.Field( + proto.ENUM, + number=3, + oneof="kind", + enum="Action", + ) + + +class StreamInfo(proto.Message): + r"""The Information of bi-directional stream. + + Attributes: + id (str): + Unique identifier for the stream. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EgressResponse(proto.Message): + r"""gRPC response payload for tether. + + Attributes: + id (str): + Unique identifier for the response. Matches + the EgressRequest's id. + http_response (google.cloud.apigeeconnect_v1.types.HttpResponse): + HttpResponse. + status (google.rpc.status_pb2.Status): + Errors from application when handling the + http request. + project (str): + GCP Project. Format: ``projects/{project_number}``. + trace_id (str): + Unique identifier for clients to trace their + request/response. Matches the EgressRequest's + trace id + endpoint (google.cloud.apigeeconnect_v1.types.TetherEndpoint): + Tether Endpoint. + name (str): + Name is the full resource path of endpoint. Format: + ``projects/{project_number or project_id}/endpoints/{endpoint}`` + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + http_response: "HttpResponse" = proto.Field( + proto.MESSAGE, + number=2, + message="HttpResponse", + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + project: str = proto.Field( + proto.STRING, + number=4, + ) + trace_id: str = proto.Field( + proto.STRING, + number=5, + ) + endpoint: "TetherEndpoint" = proto.Field( + proto.ENUM, + number=6, + enum="TetherEndpoint", + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class HttpRequest(proto.Message): + r"""The proto definition of http request. + + Attributes: + id (str): + A unique identifier for the request. + method (str): + The HTTP request method. + Valid methods: "GET", "HEAD", "POST", "PUT", + "PATCH","DELETE". + url (google.cloud.apigeeconnect_v1.types.Url): + The HTTP request URL. + headers (MutableSequence[google.cloud.apigeeconnect_v1.types.Header]): + The HTTP request headers. + body (bytes): + HTTP request body. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + method: str = proto.Field( + proto.STRING, + number=2, + ) + url: "Url" = proto.Field( + proto.MESSAGE, + number=3, + message="Url", + ) + headers: MutableSequence["Header"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Header", + ) + body: bytes = proto.Field( + proto.BYTES, + number=5, + ) + + +class Url(proto.Message): + r"""The proto definition of url. A url represents a URL and the general + form represented is: + + ``[scheme://][google.cloud.apigeeconnect.v1.Url.host][path]`` + + Attributes: + scheme (google.cloud.apigeeconnect_v1.types.Scheme): + Scheme. + host (str): + Host or Host:Port. + path (str): + Path starts with ``/``. + """ + + scheme: "Scheme" = proto.Field( + proto.ENUM, + number=1, + enum="Scheme", + ) + host: str = proto.Field( + proto.STRING, + number=2, + ) + path: str = proto.Field( + proto.STRING, + number=3, + ) + + +class Header(proto.Message): + r"""The http headers. 
+ + Attributes: + key (str): + + values (MutableSequence[str]): + + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class HttpResponse(proto.Message): + r"""The proto definition of http response. + + Attributes: + id (str): + A unique identifier that matches the request + ID. + status (str): + Status of http response, e.g. "200 OK". + status_code (int): + Status code of http response, e.g. 200. + body (bytes): + The HTTP 1.1 response body. + headers (MutableSequence[google.cloud.apigeeconnect_v1.types.Header]): + The HTTP response headers. + content_length (int): + Content length records the length of the + associated content. The value -1 indicates that + the length is unknown. Unless http method is + "HEAD", values >= 0 indicate that the given + number of bytes may be read from Body. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + status: str = proto.Field( + proto.STRING, + number=2, + ) + status_code: int = proto.Field( + proto.INT32, + number=3, + ) + body: bytes = proto.Field( + proto.BYTES, + number=4, + ) + headers: MutableSequence["Header"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Header", + ) + content_length: int = proto.Field( + proto.INT64, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-apigee-connect/mypy.ini b/packages/google-cloud-apigee-connect/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-apigee-connect/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-apigee-connect/noxfile.py b/packages/google-cloud-apigee-connect/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-apigee-connect/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
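+#
+# Editorial note (not generated): the sessions defined below are run with
+# the nox CLI, e.g. ``nox -s lint`` or ``nox -s unit-3.9``; ``nox -l``
+# lists every configured session.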
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. 
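+    # Editorial note: the per-interpreter constraints file referenced just
+    # below (testing/constraints-<python>.txt, added later in this change)
+    # pins dependency lower bounds so unit runs stay reproducible.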
+ + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
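+
+    Roughly equivalent to running (editorial note)::
+
+        coverage report --show-missing --fail-under=100
+        coverage erase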
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-cloud-apigee-connect/renovate.json b/packages/google-cloud-apigee-connect/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-apigee-connect/scripts/decrypt-secrets.sh b/packages/google-cloud-apigee-connect/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..21f6d2a26d90
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
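+# Editorial note: the decrypted files land in testing/, which this change
+# also covers with a .gitignore entry so credentials never enter version
+# control.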
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-apigee-connect/scripts/fixup_apigeeconnect_v1_keywords.py b/packages/google-cloud-apigee-connect/scripts/fixup_apigeeconnect_v1_keywords.py new file mode 100644 index 000000000000..01e585575467 --- /dev/null +++ b/packages/google-cloud-apigee-connect/scripts/fixup_apigeeconnect_v1_keywords.py @@ -0,0 +1,177 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class apigeeconnectCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'egress': ('id', 'http_response', 'status', 'project', 'trace_id', 'endpoint', 'name', ), + 'list_connections': ('parent', 'page_size', 'page_token', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
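+            # Editorial note: zip() pairs each remaining positional or
+            # keyword argument with its declared parameter name, producing
+            # the entries of the single flattened ``request`` dict.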
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=apigeeconnectCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the apigeeconnect client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-apigee-connect/scripts/readme-gen/readme_gen.py b/packages/google-cloud-apigee-connect/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..91b59676bfc7 --- /dev/null +++ b/packages/google-cloud-apigee-connect/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+    trim_blocks=True,
+    loader=jinja2.FileSystemLoader(
+        os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
+    ),
+    autoescape=True,
+)
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+    return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('source')
+    parser.add_argument('--destination', default='README.rst')
+
+    args = parser.parse_args()
+
+    source = os.path.abspath(args.source)
+    root = os.path.dirname(source)
+    destination = os.path.join(root, args.destination)
+
+    jinja_env.globals['get_help'] = get_help
+
+    with io.open(source, 'r') as f:
+        # Use safe_load: yaml.load without an explicit Loader is unsafe and
+        # is rejected by PyYAML 6+.
+        config = yaml.safe_load(f)
+
+    # This allows get_help to execute in the right directory.
+    os.chdir(root)
+
+    output = README_TMPL.render(config)
+
+    with io.open(destination, 'w') as f:
+        f.write(output)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 000000000000..4fd239765b0a
--- /dev/null
+++ b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+   become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+..
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. 
_Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-apigee-connect/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-apigee-connect/setup.cfg b/packages/google-cloud-apigee-connect/setup.cfg new file mode 100644 index 000000000000..c3a2b39f6528 --- /dev/null +++ b/packages/google-cloud-apigee-connect/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-apigee-connect/setup.py b/packages/google-cloud-apigee-connect/setup.py new file mode 100644 index 000000000000..afb578c46cc9 --- /dev/null +++ b/packages/google-cloud-apigee-connect/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-apigee-connect" + + +description = "Google Cloud Apigee Connect API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/apigeeconnect/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-apigee-connect/testing/.gitignore b/packages/google-cloud-apigee-connect/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-apigee-connect/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-apigee-connect/testing/constraints-3.10.txt b/packages/google-cloud-apigee-connect/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apigee-connect/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
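+# (Left unpinned here; constraints-3.7.txt pins the exact lower bounds.)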
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apigee-connect/testing/constraints-3.11.txt b/packages/google-cloud-apigee-connect/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apigee-connect/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apigee-connect/testing/constraints-3.12.txt b/packages/google-cloud-apigee-connect/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apigee-connect/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apigee-connect/testing/constraints-3.7.txt b/packages/google-cloud-apigee-connect/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-apigee-connect/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-apigee-connect/testing/constraints-3.8.txt b/packages/google-cloud-apigee-connect/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apigee-connect/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apigee-connect/testing/constraints-3.9.txt b/packages/google-cloud-apigee-connect/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-apigee-connect/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-apigee-connect/tests/__init__.py b/packages/google-cloud-apigee-connect/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-apigee-connect/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-apigee-connect/tests/unit/__init__.py b/packages/google-cloud-apigee-connect/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-apigee-connect/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/__init__.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/__init__.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py new file mode 100644 index 000000000000..1071ee324216 --- /dev/null +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py @@ -0,0 +1,1826 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.apigeeconnect_v1.services.connection_service import ( + ConnectionServiceAsyncClient, + ConnectionServiceClient, + pagers, + transports, +) +from google.cloud.apigeeconnect_v1.types import connection + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConnectionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConnectionServiceClient, "grpc"), + (ConnectionServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_connection_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("apigeeconnect.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ 
+ (transports.ConnectionServiceGrpcTransport, "grpc"), + (transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_connection_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConnectionServiceClient, "grpc"), + (ConnectionServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_connection_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("apigeeconnect.googleapis.com:443") + + +def test_connection_service_client_get_transport_class(): + transport = ConnectionServiceClient.get_transport_class() + available_transports = [ + transports.ConnectionServiceGrpcTransport, + ] + assert transport in available_transports + + transport = ConnectionServiceClient.get_transport_class("grpc") + assert transport == transports.ConnectionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ConnectionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceClient), +) +@mock.patch.object( + ConnectionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceAsyncClient), +) +def test_connection_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ConnectionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ConnectionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
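+    # An explicit api_endpoint should be passed through to the transport host
+    # verbatim, bypassing the DEFAULT_ENDPOINT / DEFAULT_MTLS_ENDPOINT resolution.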
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
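+    # Note the contrast with the mTLS-endpoint case above: an invalid
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value surfaces as a plain ValueError
+    # rather than a MutualTLSChannelError.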
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            ConnectionServiceClient,
+            transports.ConnectionServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            ConnectionServiceAsyncClient,
+            transports.ConnectionServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            ConnectionServiceClient,
+            transports.ConnectionServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            ConnectionServiceAsyncClient,
+            transports.ConnectionServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    ConnectionServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ConnectionServiceClient),
+)
+@mock.patch.object(
+    ConnectionServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ConnectionServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_connection_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
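+    # With GOOGLE_API_USE_MTLS_ENDPOINT left at "auto", the mTLS endpoint should be
+    # selected only when a client certificate is both available and enabled.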
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ConnectionServiceClient, ConnectionServiceAsyncClient] +) +@mock.patch.object( + ConnectionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceClient), +) +@mock.patch.object( + ConnectionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceAsyncClient), +) +def test_connection_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
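+    # A user-supplied api_endpoint takes precedence over the computed mTLS endpoint.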
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_connection_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
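+    # User-supplied scopes should be forwarded to the transport unchanged.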
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConnectionServiceClient, + transports.ConnectionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_connection_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_connection_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.apigeeconnect_v1.services.connection_service.transports.ConnectionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ConnectionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConnectionServiceClient, + transports.ConnectionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_connection_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
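+    # create_channel must receive the credentials loaded from the file, not the
+    # ADC credentials that google.auth.default() would return.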
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "apigeeconnect.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="apigeeconnect.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + connection.ListConnectionsRequest, + dict, + ], +) +def test_list_connections(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.ListConnectionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == connection.ListConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + client.list_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == connection.ListConnectionsRequest() + + +@pytest.mark.asyncio +async def test_list_connections_async( + transport: str = "grpc_asyncio", request_type=connection.ListConnectionsRequest +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. 
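+        # The async stub must return an awaitable, so the response is wrapped in a
+        # fake unary-unary call object.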
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.ListConnectionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == connection.ListConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_connections_async_from_dict(): + await test_list_connections_async(request_type=dict) + + +def test_list_connections_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.ListConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value = connection.ListConnectionsResponse() + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_connections_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.ListConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.ListConnectionsResponse() + ) + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_connections_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.ListConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].parent
+    mock_val = "parent_value"
+    assert arg == mock_val
+
+
+def test_list_connections_flattened_error():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_connections(
+            connection.ListConnectionsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_connections_flattened_async():
+    client = ConnectionServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            connection.ListConnectionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_connections(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_connections_flattened_error_async():
+    client = ConnectionServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_connections(
+            connection.ListConnectionsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_connections_pager(transport_name: str = "grpc"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+                next_page_token="abc",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[],
+                next_page_token="def",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                ],
+                next_page_token="ghi",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_connections(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, connection.Connection) for i in results)
+
+
+def test_list_connections_pages(transport_name: str = "grpc"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_connections), "__call__") as call:
+        # Set the response to a series of pages.
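+        # Four fake pages: the non-empty next_page_tokens chain the first three
+        # pages together, the empty token on the last page ends iteration, and the
+        # trailing RuntimeError would only surface if the pager over-fetched.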
+        call.side_effect = (
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+                next_page_token="abc",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[],
+                next_page_token="def",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                ],
+                next_page_token="ghi",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_connections(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_connections_async_pager():
+    client = ConnectionServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+                next_page_token="abc",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[],
+                next_page_token="def",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                ],
+                next_page_token="ghi",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_connections(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, connection.Connection) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_connections_async_pages():
+    client = ConnectionServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+                next_page_token="abc",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[],
+                next_page_token="def",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                ],
+                next_page_token="ghi",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_connections(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
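+    # A transport instance already carries its own credentials, so supplying both
+    # is ambiguous and must be rejected.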
+ transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConnectionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConnectionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = ConnectionServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConnectionServiceGrpcTransport, + ) + + +def test_connection_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConnectionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_connection_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apigeeconnect_v1.services.connection_service.transports.ConnectionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ConnectionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("list_connections",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_connection_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apigeeconnect_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConnectionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_connection_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apigeeconnect_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConnectionServiceTransport() + adc.assert_called_once() + + +def test_connection_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConnectionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
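+    # Any scopes and quota_project_id supplied to the transport should be
+    # forwarded to google.auth.default().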
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConnectionServiceGrpcTransport, grpc_helpers), + (transports.ConnectionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_connection_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "apigeeconnect.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="apigeeconnect.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
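+    # In that case the callback's certificate/key bytes are expected to be passed
+    # to grpc.ssl_channel_credentials().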
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_connection_service_host_no_port(transport_name):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="apigeeconnect.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("apigeeconnect.googleapis.com:443")
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_connection_service_host_with_port(transport_name):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="apigeeconnect.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("apigeeconnect.googleapis.com:8000")
+
+
+def test_connection_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ConnectionServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_connection_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ConnectionServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_endpoint_path(): + project = "squid" + endpoint = "clam" + expected = "projects/{project}/endpoints/{endpoint}".format( + project=project, + endpoint=endpoint, + ) + actual = ConnectionServiceClient.endpoint_path(project, endpoint) + assert expected == actual + + +def test_parse_endpoint_path(): + expected = { + "project": "whelk", + "endpoint": "octopus", + } + path = ConnectionServiceClient.endpoint_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConnectionServiceClient.parse_endpoint_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ConnectionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ConnectionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ConnectionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ConnectionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ConnectionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ConnectionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ConnectionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ConnectionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ConnectionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ConnectionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConnectionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConnectionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConnectionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConnectionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py new file mode 100644 index 000000000000..3774d7e9e8ec --- /dev/null +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py @@ -0,0 +1,1382 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.apigeeconnect_v1.services.tether import ( + TetherAsyncClient, + TetherClient, + transports, +) +from google.cloud.apigeeconnect_v1.types import tether + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
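+# For example, with the patched default of "foo.googleapis.com", the derived
+# mTLS endpoint exercised by the tests below is "foo.mtls.googleapis.com".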
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TetherClient._get_default_mtls_endpoint(None) is None + assert TetherClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + TetherClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + TetherClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TetherClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert TetherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TetherClient, "grpc"), + (TetherAsyncClient, "grpc_asyncio"), + ], +) +def test_tether_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("apigeeconnect.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TetherGrpcTransport, "grpc"), + (transports.TetherGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_tether_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TetherClient, "grpc"), + (TetherAsyncClient, "grpc_asyncio"), + ], +) +def test_tether_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("apigeeconnect.googleapis.com:443") + + +def test_tether_client_get_transport_class(): + transport = TetherClient.get_transport_class() + available_transports = [ + transports.TetherGrpcTransport, + ] + assert transport in 
available_transports + + transport = TetherClient.get_transport_class("grpc") + assert transport == transports.TetherGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TetherClient, transports.TetherGrpcTransport, "grpc"), + (TetherAsyncClient, transports.TetherGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +@mock.patch.object( + TetherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TetherClient) +) +@mock.patch.object( + TetherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TetherAsyncClient) +) +def test_tether_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TetherClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TetherClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (TetherClient, transports.TetherGrpcTransport, "grpc", "true"),
+        (
+            TetherAsyncClient,
+            transports.TetherGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (TetherClient, transports.TetherGrpcTransport, "grpc", "false"),
+        (
+            TetherAsyncClient,
+            transports.TetherGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    TetherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TetherClient)
+)
+@mock.patch.object(
+    TetherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TetherAsyncClient)
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_tether_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided.
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [TetherClient, TetherAsyncClient]) +@mock.patch.object( + TetherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TetherClient) +) +@mock.patch.object( + TetherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TetherAsyncClient) +) +def test_tether_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
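+    # ("never" pins the client to the plain endpoint even when a client
+    # certificate source is configured.)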
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TetherClient, transports.TetherGrpcTransport, "grpc"), + (TetherAsyncClient, transports.TetherGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_tether_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TetherClient, transports.TetherGrpcTransport, "grpc", grpc_helpers), + ( + TetherAsyncClient, + transports.TetherGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_tether_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
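+    # (The path is forwarded to the transport as credentials_file; the actual
+    # loading of the file is exercised in the create_channel test further below.)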
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_tether_client_client_options_from_dict(): + with mock.patch( + "google.cloud.apigeeconnect_v1.services.tether.transports.TetherGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TetherClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TetherClient, transports.TetherGrpcTransport, "grpc", grpc_helpers), + ( + TetherAsyncClient, + transports.TetherGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_tether_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
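+    # (i.e. the credentials loaded via load_credentials_from_file take
+    # precedence over google.auth.default() when credentials_file is set.)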
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "apigeeconnect.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="apigeeconnect.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tether.EgressResponse, + dict, + ], +) +def test_egress(request_type, transport: str = "grpc"): + client = TetherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.egress), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([tether.EgressRequest()]) + response = client.egress(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, tether.EgressRequest) + + +@pytest.mark.asyncio +async def test_egress_async( + transport: str = "grpc_asyncio", request_type=tether.EgressResponse +): + client = TetherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.egress), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[tether.EgressRequest()]) + response = await client.egress(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, tether.EgressRequest) + + +@pytest.mark.asyncio +async def test_egress_async_from_dict(): + await test_egress_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
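+    # A transport instance already carries its own credentials, so supplying
+    # credentials, a credentials file, scopes, or an API key alongside it is
+    # rejected with a ValueError, as the cases below verify.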
+ transport = transports.TetherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TetherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TetherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TetherClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TetherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TetherClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TetherClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TetherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TetherClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TetherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TetherClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TetherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TetherGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TetherGrpcTransport, + transports.TetherGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = TetherClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TetherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TetherGrpcTransport, + ) + + +def test_tether_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TetherTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_tether_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apigeeconnect_v1.services.tether.transports.TetherTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TetherTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("egress",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_tether_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apigeeconnect_v1.services.tether.transports.TetherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TetherTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_tether_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apigeeconnect_v1.services.tether.transports.TetherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TetherTransport() + adc.assert_called_once() + + +def test_tether_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TetherClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TetherGrpcTransport, + transports.TetherGrpcAsyncIOTransport, + ], +) +def test_tether_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
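+    # (ADC = Application Default Credentials, resolved via google.auth.default().)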
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TetherGrpcTransport, + transports.TetherGrpcAsyncIOTransport, + ], +) +def test_tether_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TetherGrpcTransport, grpc_helpers), + (transports.TetherGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_tether_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "apigeeconnect.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="apigeeconnect.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.TetherGrpcTransport, transports.TetherGrpcAsyncIOTransport], +) +def test_tether_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
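+    # In that case the (cert, key) pair returned by the callback is handed to
+    # grpc.ssl_channel_credentials(), as asserted below.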
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_tether_host_no_port(transport_name):
+    client = TetherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="apigeeconnect.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("apigeeconnect.googleapis.com:443")
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_tether_host_with_port(transport_name):
+    client = TetherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="apigeeconnect.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("apigeeconnect.googleapis.com:8000")
+
+
+def test_tether_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TetherGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_tether_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TetherGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.TetherGrpcTransport, transports.TetherGrpcAsyncIOTransport], +) +def test_tether_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.TetherGrpcTransport, transports.TetherGrpcAsyncIOTransport], +) +def test_tether_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TetherClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TetherClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TetherClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TetherClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TetherClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TetherClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TetherClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TetherClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TetherClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = TetherClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TetherClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TetherClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TetherClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TetherClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TetherClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TetherTransport, "_prep_wrapped_messages" + ) as prep: + client = TetherClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TetherTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TetherClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TetherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TetherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = TetherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TetherClient, transports.TetherGrpcTransport), + (TetherAsyncClient, transports.TetherGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-bigquery-biglake/.OwlBot.yaml b/packages/google-cloud-bigquery-biglake/.OwlBot.yaml new file mode 100644 index 000000000000..248acda952fb --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/bigquery/biglake/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-bigquery-biglake/$1 diff --git a/packages/google-cloud-bigquery-biglake/.coveragerc b/packages/google-cloud-bigquery-biglake/.coveragerc new file mode 100644 index 000000000000..667ff6432af3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/bigquery/biglake/__init__.py + google/cloud/bigquery/biglake/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-bigquery-biglake/.flake8 b/packages/google-cloud-bigquery-biglake/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-bigquery-biglake/.gitignore b/packages/google-cloud-bigquery-biglake/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-bigquery-biglake/.repo-metadata.json b/packages/google-cloud-bigquery-biglake/.repo-metadata.json new file mode 100644 index 000000000000..4a06172a1842 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "biglake", + "name_pretty": "BigLake API", + "api_description": "BigLake API", + "product_documentation": "https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore", + "client_documentation": "https://cloud.google.com/python/docs/reference/biglake/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187149&template=1019829", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-bigquery-biglake", + "api_id": "biglake.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "biglake" +} diff --git a/packages/google-cloud-bigquery-biglake/CHANGELOG.md b/packages/google-cloud-bigquery-biglake/CHANGELOG.md new file mode 100644 index 000000000000..fd8c83f1433a --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/CHANGELOG.md @@ -0,0 +1,32 @@ +# Changelog + +## [0.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.2.0...google-cloud-bigquery-biglake-v0.3.0) (2023-04-12) + + +### Features + +* add BigQuery BigLake v1 API ([02e6b75](https://github.com/googleapis/google-cloud-python/commit/02e6b7504844110a3d9967fa77908a844a026e1f)) +* set BigQuery BigLake v1 as the default import ([02e6b75](https://github.com/googleapis/google-cloud-python/commit/02e6b7504844110a3d9967fa77908a844a026e1f)) + +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.1.1...google-cloud-bigquery-biglake-v0.2.0) (2023-04-06) + + +### Features + +* add RenameTable, etag, and ListTables view ([#11050](https://github.com/googleapis/google-cloud-python/issues/11050)) ([b791900](https://github.com/googleapis/google-cloud-python/commit/b7919001ccd7773307b806b5fff68166352e01b2)) + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.1.0...google-cloud-bigquery-biglake-v0.1.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## 0.1.0 (2023-03-17) + + +### Features + +* add initial files for google.cloud.bigquery.biglake.v1alpha1 ([#10861](https://github.com/googleapis/google-cloud-python/issues/10861)) ([06db74d](https://github.com/googleapis/google-cloud-python/commit/06db74d8af7cb3de73e981996d851f3bf68946a8)) + +## Changelog diff --git a/packages/google-cloud-bigquery-biglake/CODE_OF_CONDUCT.md b/packages/google-cloud-bigquery-biglake/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, 
education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-bigquery-biglake/CONTRIBUTING.rst b/packages/google-cloud-bigquery-biglake/CONTRIBUTING.rst new file mode 100644 index 000000000000..4ac941584062 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+ +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-<python-version> -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature.
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` folder. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-bigquery-biglake + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py + + +We also explicitly decided to support Python 3 beginning with version 3.7. +Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests.
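The Versioning section above notes that a ``0.y.z`` package may break its public API at any minor bump. A short sketch of what a conservative downstream pin admits in that regime (illustrative only, not a recommendation from this change; uses the `packaging` library that ships as a dependency of pip/setuptools):

# Sketch: behavior of a patch-releases-only pin for a major-version-zero package.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Allow 0.3.x patch releases only, since any 0.y bump may break the API.
pin = SpecifierSet(">=0.3.0,<0.4.0")

for candidate in ("0.3.0", "0.3.5", "0.4.0"):
    print(candidate, "allowed" if Version(candidate) in pin else "excluded")
# 0.3.0 allowed / 0.3.5 allowed / 0.4.0 excluded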
diff --git a/packages/google-cloud-bigquery-biglake/LICENSE b/packages/google-cloud-bigquery-biglake/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-bigquery-biglake/MANIFEST.in b/packages/google-cloud-bigquery-biglake/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-bigquery-biglake/README.rst b/packages/google-cloud-bigquery-biglake/README.rst new file mode 100644 index 000000000000..4793bcabf1ea --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/README.rst @@ -0,0 +1,107 @@ +Python Client for BigLake API +============================= + +|preview| |pypi| |versions| + +`BigLake API`_: BigLake API + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-biglake.svg + :target: https://pypi.org/project/google-cloud-bigquery-biglake/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-biglake.svg + :target: https://pypi.org/project/google-cloud-bigquery-biglake/ +.. _BigLake API: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/biglake/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the BigLake API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the BigLake API.: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-cloud-bigquery-biglake + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-cloud-bigquery-biglake + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for BigLake API + to see other available methods on the client. +- Read the `BigLake API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _BigLake API Product documentation: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-bigquery-biglake/SECURITY.md b/packages/google-cloud-bigquery-biglake/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for intake, and we coordinate disclosure here on GitHub, using a GitHub Security Advisory to privately discuss and fix the issue.
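The README's Next Steps point to the reference docs; for orientation, a minimal sketch of calling the new client added in this change. The project and location are placeholders, and the snippet assumes the package is installed and Application Default Credentials are configured (e.g. via `gcloud auth application-default login`):

# Sketch only: list BigLake catalogs with the MetastoreService client.
# "my-project" and "us-central1" are placeholder values.
from google.cloud.bigquery import biglake_v1

client = biglake_v1.MetastoreServiceClient()
parent = "projects/my-project/locations/us-central1"

# list_catalogs returns a pager that transparently fetches further pages.
for catalog in client.list_catalogs(parent=parent):
    print(catalog.name)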
diff --git a/packages/google-cloud-bigquery-biglake/docs/CHANGELOG.md b/packages/google-cloud-bigquery-biglake/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-biglake/docs/README.rst b/packages/google-cloud-bigquery-biglake/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-bigquery-biglake/docs/_static/custom.css b/packages/google-cloud-bigquery-biglake/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-bigquery-biglake/docs/_templates/layout.html b/packages/google-cloud-bigquery-biglake/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +  <div class="document">
+    {{ sidebar() }} +    {%- block document %} +      <div class="documentwrapper">
+      {%- if render_sidebar %} +        <div class="bodywrapper">
+      {%- endif %} + +        {%- block relbar_top %} +        {%- if theme_show_relbar_top|tobool %} +          <div class="related top"> +            &nbsp; +            {{- rellink_markup () }} +          </div> +        {%- endif %} +        {% endblock %} +
+        <div class="body" role="main">
+          <div class="admonition" id="python2-eol"> +          As of January 1, 2020 this library no longer supports Python 2 on the latest released version. +          Library versions released prior to that date will continue to be available. For more information please +          visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. +          </div>
+          {% block body %} {% endblock %} +        </div>
+ +        {%- block relbar_bottom %} +        {%- if theme_show_relbar_bottom|tobool %} +          <div class="related bottom"> +            &nbsp; +            {{- rellink_markup () }} +          </div> +        {%- endif %} +        {% endblock %} + +      {%- if render_sidebar %} +        </div>
+      {%- endif %} +      </div>
+    {%- endblock %} +    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-bigquery-biglake/docs/biglake_v1/metastore_service.rst b/packages/google-cloud-bigquery-biglake/docs/biglake_v1/metastore_service.rst new file mode 100644 index 000000000000..4f8f1d55fca3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/biglake_v1/metastore_service.rst @@ -0,0 +1,10 @@ +MetastoreService +---------------------------------- + +.. automodule:: google.cloud.bigquery.biglake_v1.services.metastore_service + :members: + :inherited-members: + +.. automodule:: google.cloud.bigquery.biglake_v1.services.metastore_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-biglake/docs/biglake_v1/services.rst b/packages/google-cloud-bigquery-biglake/docs/biglake_v1/services.rst new file mode 100644 index 000000000000..a7c177be5e84 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/biglake_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Bigquery Biglake v1 API +================================================= +.. toctree:: + :maxdepth: 2 + + metastore_service diff --git a/packages/google-cloud-bigquery-biglake/docs/biglake_v1/types.rst b/packages/google-cloud-bigquery-biglake/docs/biglake_v1/types.rst new file mode 100644 index 000000000000..1d250b5d3ade --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/biglake_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bigquery Biglake v1 API +============================================== + +.. automodule:: google.cloud.bigquery.biglake_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/metastore_service.rst b/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/metastore_service.rst new file mode 100644 index 000000000000..a662d169772e --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/metastore_service.rst @@ -0,0 +1,10 @@ +MetastoreService +---------------------------------- + +.. automodule:: google.cloud.bigquery.biglake_v1alpha1.services.metastore_service + :members: + :inherited-members: + +.. automodule:: google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/services.rst b/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/services.rst new file mode 100644 index 000000000000..2364a8885eb9 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Bigquery Biglake v1alpha1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + metastore_service diff --git a/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/types.rst b/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/types.rst new file mode 100644 index 000000000000..482018317af9 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/biglake_v1alpha1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bigquery Biglake v1alpha1 API +==================================================== + +.. 
automodule:: google.cloud.bigquery.biglake_v1alpha1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-bigquery-biglake/docs/conf.py b/packages/google-cloud-bigquery-biglake/docs/conf.py new file mode 100644 index 000000000000..85e36171ff0d --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-bigquery-biglake documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-bigquery-biglake" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-bigquery-biglake", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-bigquery-biglake-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-bigquery-biglake.tex", + "google-cloud-bigquery-biglake Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-bigquery-biglake", + "google-cloud-bigquery-biglake Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-bigquery-biglake", + "google-cloud-bigquery-biglake Documentation", + author, + "google-cloud-bigquery-biglake", + "google-cloud-bigquery-biglake Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-bigquery-biglake/docs/index.rst b/packages/google-cloud-bigquery-biglake/docs/index.rst new file mode 100644 index 000000000000..eb70727b91cd --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of BigLake API. +By default, you will get version ``biglake_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + biglake_v1/services + biglake_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + biglake_v1alpha1/services + biglake_v1alpha1/types + + +Changelog +--------- + +For a list of all ``google-cloud-bigquery-biglake`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-bigquery-biglake/docs/multiprocessing.rst b/packages/google-cloud-bigquery-biglake/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. 
note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/__init__.py new file mode 100644 index 000000000000..6d15df0484c5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/__init__.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.bigquery.biglake import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.bigquery.biglake_v1.services.metastore_service.async_client import ( + MetastoreServiceAsyncClient, +) +from google.cloud.bigquery.biglake_v1.services.metastore_service.client import ( + MetastoreServiceClient, +) +from google.cloud.bigquery.biglake_v1.types.metastore import ( + Catalog, + CreateCatalogRequest, + CreateDatabaseRequest, + CreateTableRequest, + Database, + DeleteCatalogRequest, + DeleteDatabaseRequest, + DeleteTableRequest, + GetCatalogRequest, + GetDatabaseRequest, + GetTableRequest, + HiveDatabaseOptions, + HiveTableOptions, + ListCatalogsRequest, + ListCatalogsResponse, + ListDatabasesRequest, + ListDatabasesResponse, + ListTablesRequest, + ListTablesResponse, + RenameTableRequest, + Table, + TableView, + UpdateDatabaseRequest, + UpdateTableRequest, +) + +__all__ = ( + "MetastoreServiceClient", + "MetastoreServiceAsyncClient", + "Catalog", + "CreateCatalogRequest", + "CreateDatabaseRequest", + "CreateTableRequest", + "Database", + "DeleteCatalogRequest", + "DeleteDatabaseRequest", + "DeleteTableRequest", + "GetCatalogRequest", + "GetDatabaseRequest", + "GetTableRequest", + "HiveDatabaseOptions", + "HiveTableOptions", + "ListCatalogsRequest", + "ListCatalogsResponse", + "ListDatabasesRequest", + "ListDatabasesResponse", + "ListTablesRequest", + "ListTablesResponse", + "RenameTableRequest", + "Table", + "UpdateDatabaseRequest", + "UpdateTableRequest", + "TableView", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/gapic_version.py new file mode 100644 index 000000000000..c050f9a7e392 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/py.typed b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/py.typed new file mode 100644 index 000000000000..70e9a3b83398 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-biglake package uses inline types. diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/__init__.py new file mode 100644 index 000000000000..92f4b511457a --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/__init__.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.bigquery.biglake_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.metastore_service import ( + MetastoreServiceAsyncClient, + MetastoreServiceClient, +) +from .types.metastore import ( + Catalog, + CreateCatalogRequest, + CreateDatabaseRequest, + CreateTableRequest, + Database, + DeleteCatalogRequest, + DeleteDatabaseRequest, + DeleteTableRequest, + GetCatalogRequest, + GetDatabaseRequest, + GetTableRequest, + HiveDatabaseOptions, + HiveTableOptions, + ListCatalogsRequest, + ListCatalogsResponse, + ListDatabasesRequest, + ListDatabasesResponse, + ListTablesRequest, + ListTablesResponse, + RenameTableRequest, + Table, + TableView, + UpdateDatabaseRequest, + UpdateTableRequest, +) + +__all__ = ( + "MetastoreServiceAsyncClient", + "Catalog", + "CreateCatalogRequest", + "CreateDatabaseRequest", + "CreateTableRequest", + "Database", + "DeleteCatalogRequest", + "DeleteDatabaseRequest", + "DeleteTableRequest", + "GetCatalogRequest", + "GetDatabaseRequest", + "GetTableRequest", + "HiveDatabaseOptions", + "HiveTableOptions", + "ListCatalogsRequest", + "ListCatalogsResponse", + "ListDatabasesRequest", + "ListDatabasesResponse", + "ListTablesRequest", + "ListTablesResponse", + "MetastoreServiceClient", + "RenameTableRequest", + "Table", + "TableView", + "UpdateDatabaseRequest", + "UpdateTableRequest", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/gapic_metadata.json b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/gapic_metadata.json new file mode 100644 index 000000000000..9169edcbf6f9 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/gapic_metadata.json @@ -0,0 +1,253 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery.biglake_v1", + "protoPackage": "google.cloud.bigquery.biglake.v1", + "schema": "1.0", + "services": { + "MetastoreService": { + "clients": { + "grpc": { + "libraryClient": "MetastoreServiceClient", + "rpcs": { + "CreateCatalog": { + "methods": [ + "create_catalog" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateTable": { + "methods": [ + "create_table" + ] + }, + "DeleteCatalog": { + "methods": [ + "delete_catalog" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteTable": { + "methods": [ + "delete_table" + ] + }, + "GetCatalog": { + "methods": [ + "get_catalog" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "ListCatalogs": { + "methods": [ + "list_catalogs" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "RenameTable": { + "methods": [ + "rename_table" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateTable": { + "methods": [ + "update_table" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetastoreServiceAsyncClient", + "rpcs": { + "CreateCatalog": { + "methods": [ + "create_catalog" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateTable": { + "methods": [ + "create_table" + ] + }, + "DeleteCatalog": { + "methods": [ + "delete_catalog" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteTable": { + "methods": [ + "delete_table" + ] + }, + 
"GetCatalog": { + "methods": [ + "get_catalog" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "ListCatalogs": { + "methods": [ + "list_catalogs" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "RenameTable": { + "methods": [ + "rename_table" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateTable": { + "methods": [ + "update_table" + ] + } + } + }, + "rest": { + "libraryClient": "MetastoreServiceClient", + "rpcs": { + "CreateCatalog": { + "methods": [ + "create_catalog" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateTable": { + "methods": [ + "create_table" + ] + }, + "DeleteCatalog": { + "methods": [ + "delete_catalog" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteTable": { + "methods": [ + "delete_table" + ] + }, + "GetCatalog": { + "methods": [ + "get_catalog" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "ListCatalogs": { + "methods": [ + "list_catalogs" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "RenameTable": { + "methods": [ + "rename_table" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateTable": { + "methods": [ + "update_table" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/gapic_version.py new file mode 100644 index 000000000000..c050f9a7e392 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/py.typed b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/py.typed new file mode 100644 index 000000000000..70e9a3b83398 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-biglake package uses inline types. 
diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/__init__.py new file mode 100644 index 000000000000..dbacb2f78046 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MetastoreServiceAsyncClient +from .client import MetastoreServiceClient + +__all__ = ( + "MetastoreServiceClient", + "MetastoreServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/async_client.py new file mode 100644 index 000000000000..5676e401afad --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/async_client.py @@ -0,0 +1,1924 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery.biglake_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.bigquery.biglake_v1.services.metastore_service import pagers +from google.cloud.bigquery.biglake_v1.types import metastore + +from .client import MetastoreServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport +from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport + + +class MetastoreServiceAsyncClient: + """BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + """ + + _client: MetastoreServiceClient + + DEFAULT_ENDPOINT = MetastoreServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT + + catalog_path = staticmethod(MetastoreServiceClient.catalog_path) + parse_catalog_path = staticmethod(MetastoreServiceClient.parse_catalog_path) + database_path = staticmethod(MetastoreServiceClient.database_path) + parse_database_path = staticmethod(MetastoreServiceClient.parse_database_path) + table_path = staticmethod(MetastoreServiceClient.table_path) + parse_table_path = staticmethod(MetastoreServiceClient.parse_table_path) + common_billing_account_path = staticmethod( + MetastoreServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MetastoreServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MetastoreServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + MetastoreServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MetastoreServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MetastoreServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(MetastoreServiceClient.common_project_path) + parse_common_project_path = staticmethod( + MetastoreServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(MetastoreServiceClient.common_location_path) + parse_common_location_path = staticmethod( + MetastoreServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided 
credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetastoreServiceAsyncClient: The constructed client.
+        """
+        return MetastoreServiceClient.from_service_account_info.__func__(MetastoreServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetastoreServiceAsyncClient: The constructed client.
+        """
+        return MetastoreServiceClient.from_service_account_file.__func__(MetastoreServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return MetastoreServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> MetastoreServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MetastoreServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(MetastoreServiceClient).get_transport_class, type(MetastoreServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, MetastoreServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the metastore service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.MetastoreServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = MetastoreServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def create_catalog(
+        self,
+        request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        catalog: Optional[metastore.Catalog] = None,
+        catalog_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> metastore.Catalog:
+        r"""Creates a new catalog.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud.bigquery import biglake_v1
+
+            async def sample_create_catalog():
+                # Create a client
+                client = biglake_v1.MetastoreServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = biglake_v1.CreateCatalogRequest(
+                    parent="parent_value",
+                    catalog_id="catalog_id_value",
+                )
+
+                # Make the request
+                response = await client.create_catalog(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.bigquery.biglake_v1.types.CreateCatalogRequest, dict]]):
+                The request object. Request message for the CreateCatalog
+                method.
+            parent (:class:`str`):
+                Required. The parent resource where this catalog will be
+                created. Format:
+                projects/{project_id_or_number}/locations/{location_id}
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            catalog (:class:`google.cloud.bigquery.biglake_v1.types.Catalog`):
+                Required. The catalog to create. The ``name`` field does
+                not need to be provided.
+ + This corresponds to the ``catalog`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + catalog_id (:class:`str`): + Required. The ID to use for the + catalog, which will become the final + component of the catalog's resource + name. + + This corresponds to the ``catalog_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, catalog, catalog_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if catalog is not None: + request.catalog = catalog + if catalog_id is not None: + request.catalog_id = catalog_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_catalog( + self, + request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Deletes an existing catalog specified by the catalog + ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_delete_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.DeleteCatalogRequest, dict]]): + The request object. Request message for the DeleteCatalog + method. + name (:class:`str`): + Required. 
The name of the catalog to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_catalog( + self, + request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Gets the catalog specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_get_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.get_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.GetCatalogRequest, dict]]): + The request object. Request message for the GetCatalog + method. + name (:class:`str`): + Required. The name of the catalog to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_catalogs( + self, + request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCatalogsAsyncPager: + r"""List all catalogs in a specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_list_catalogs(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.ListCatalogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_catalogs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.ListCatalogsRequest, dict]]): + The request object. Request message for the ListCatalogs + method. + parent (:class:`str`): + Required. The parent, which owns this collection of + catalogs. Format: + projects/{project_id_or_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListCatalogsAsyncPager: + Response message for the ListCatalogs + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListCatalogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_catalogs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCatalogsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_database( + self, + request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[metastore.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Creates a new database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_create_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.CreateDatabaseRequest, dict]]): + The request object. Request message for the + CreateDatabase method. + parent (:class:`str`): + Required. The parent resource where this database will + be created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ database (:class:`google.cloud.bigquery.biglake_v1.types.Database`): + Required. The database to create. The ``name`` field + does not need to be provided. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (:class:`str`): + Required. The ID to use for the + database, which will become the final + component of the database's resource + name. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, database, database_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_database( + self, + request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Deletes an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_delete_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.DeleteDatabaseRequest, dict]]): + The request object. Request message for the + DeleteDatabase method. + name (:class:`str`): + Required. The name of the database to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_database( + self, + request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[metastore.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Updates an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_update_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateDatabaseRequest( + ) + + # Make the request + response = await client.update_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.UpdateDatabaseRequest, dict]]): + The request object. Request message for the + UpdateDatabase method. + database (:class:`google.cloud.bigquery.biglake_v1.types.Database`): + Required. The database to update. + + The database's ``name`` field is used to identify the + database to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are + allowed to update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.UpdateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("database.name", request.database.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_database( + self, + request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Gets the database specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_get_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.GetDatabaseRequest, dict]]): + The request object. Request message for the GetDatabase + method. + name (:class:`str`): + Required. The name of the database to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_databases( + self, + request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabasesAsyncPager: + r"""List all databases in a specified catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_list_databases(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.ListDatabasesRequest, dict]]): + The request object. Request message for the ListDatabases + method. + parent (:class:`str`): + Required. The parent, which owns this collection of + databases. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListDatabasesAsyncPager: + Response message for the + ListDatabases method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListDatabasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_databases, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListDatabasesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_table( + self, + request: Optional[Union[metastore.CreateTableRequest, dict]] = None, + *, + parent: Optional[str] = None, + table: Optional[metastore.Table] = None, + table_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Creates a new table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_create_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = await client.create_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.CreateTableRequest, dict]]): + The request object. Request message for the CreateTable + method. + parent (:class:`str`): + Required. The parent resource where this table will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + table (:class:`google.cloud.bigquery.biglake_v1.types.Table`): + Required. The table to create. The ``name`` field does + not need to be provided for the table creation. + + This corresponds to the ``table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + table_id (:class:`str`): + Required. The ID to use for the + table, which will become the final + component of the table's resource name. + + This corresponds to the ``table_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, table, table_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if table is not None: + request.table = table + if table_id is not None: + request.table_id = table_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_table( + self, + request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Deletes an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_delete_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.DeleteTableRequest, dict]]): + The request object. Request message for the DeleteTable + method. + name (:class:`str`): + Required. The name of the table to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_table( + self, + request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, + *, + table: Optional[metastore.Table] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Updates an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_update_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateTableRequest( + ) + + # Make the request + response = await client.update_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.UpdateTableRequest, dict]]): + The request object. Request message for the UpdateTable + method. + table (:class:`google.cloud.bigquery.biglake_v1.types.Table`): + Required. The table to update. + + The table's ``name`` field is used to identify the table + to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are + allowed to update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([table, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = metastore.UpdateTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if table is not None: + request.table = table + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("table.name", request.table.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rename_table( + self, + request: Optional[Union[metastore.RenameTableRequest, dict]] = None, + *, + name: Optional[str] = None, + new_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Renames an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_rename_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.RenameTableRequest( + name="name_value", + new_name="new_name_value", + ) + + # Make the request + response = await client.rename_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.RenameTableRequest, dict]]): + The request object. Request message for the RenameTable + method in MetastoreService + name (:class:`str`): + Required. The table's ``name`` field is used to identify + the table to rename. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_name (:class:`str`): + Required. The new ``name`` for the specified table, must + be in the same database. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``new_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. 
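+        # Either a full request object or the flattened fields may be given,
+        # but not both; a sketch of the two equivalent call styles
+        # (``old`` and ``new`` are placeholders for fully-qualified table paths):
+        #
+        #     await client.rename_table(request={"name": old, "new_name": new})
+        #     await client.rename_table(name=old, new_name=new)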
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.RenameTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_name is not None: + request.new_name = new_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_table( + self, + request: Optional[Union[metastore.GetTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Gets the table specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_get_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = await client.get_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.GetTableRequest, dict]]): + The request object. Request message for the GetTable + method. + name (:class:`str`): + Required. The name of the table to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tables( + self, + request: Optional[Union[metastore.ListTablesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTablesAsyncPager: + r"""List all tables in a specified database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + async def sample_list_tables(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.ListTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1.types.ListTablesRequest, dict]]): + The request object. Request message for the ListTables + method. + parent (:class:`str`): + Required. The parent, which owns this collection of + tables. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListTablesAsyncPager: + Response message for the ListTables + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
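+        # The returned pager resolves additional pages lazily; a sketch, where
+        # ``db_path`` is a placeholder for a fully-qualified database path:
+        #
+        #     pager = await client.list_tables(parent=db_path)
+        #     async for table in pager:
+        #         print(table.name)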
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListTablesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tables, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTablesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MetastoreServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/client.py new file mode 100644 index 000000000000..8961708024b2 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/client.py @@ -0,0 +1,2206 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery.biglake_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.bigquery.biglake_v1.services.metastore_service import pagers +from google.cloud.bigquery.biglake_v1.types import metastore + +from .transports.base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport +from .transports.grpc import MetastoreServiceGrpcTransport +from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport +from .transports.rest import MetastoreServiceRestTransport + + +class MetastoreServiceClientMeta(type): + """Metaclass for the MetastoreService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MetastoreServiceTransport]] + _transport_registry["grpc"] = MetastoreServiceGrpcTransport + _transport_registry["grpc_asyncio"] = MetastoreServiceGrpcAsyncIOTransport + _transport_registry["rest"] = MetastoreServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MetastoreServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MetastoreServiceClient(metaclass=MetastoreServiceClientMeta): + """BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
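+        For example, "biglake.googleapis.com" becomes
+        "biglake.mtls.googleapis.com", while endpoints that are already mTLS
+        or not on googleapis.com are returned unchanged.
+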
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "biglake.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetastoreServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetastoreServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MetastoreServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MetastoreServiceTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def catalog_path( + project: str, + location: str, + catalog: str, + ) -> str: + """Returns a fully-qualified catalog string.""" + return "projects/{project}/locations/{location}/catalogs/{catalog}".format( + project=project, + location=location, + catalog=catalog, + ) + + @staticmethod + def parse_catalog_path(path: str) -> Dict[str, str]: + """Parses a catalog path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def database_path( + project: str, + location: str, + catalog: str, + database: str, + ) -> str: + """Returns a fully-qualified database string.""" + return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format( + project=project, + location=location, + catalog=catalog, + database=database, + ) + + @staticmethod + def parse_database_path(path: str) -> Dict[str, str]: + """Parses a database path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def table_path( + project: str, + location: str, + catalog: str, + database: str, + table: str, + ) -> str: + """Returns a fully-qualified table string.""" + return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format( + project=project, + location=location, + catalog=catalog, + database=database, + table=table, + ) + + @staticmethod + def parse_table_path(path: str) -> Dict[str, str]: + """Parses a table path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)/tables/(?P
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetastoreServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metastore service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MetastoreServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetastoreServiceTransport): + # transport is a MetastoreServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_catalog( + self, + request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None, + *, + parent: Optional[str] = None, + catalog: Optional[metastore.Catalog] = None, + catalog_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Creates a new catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_create_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.CreateCatalogRequest( + parent="parent_value", + catalog_id="catalog_id_value", + ) + + # Make the request + response = client.create_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.CreateCatalogRequest, dict]): + The request object. Request message for the CreateCatalog + method. + parent (str): + Required. 
The parent resource where this catalog will be + created. Format: + projects/{project_id_or_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + catalog (google.cloud.bigquery.biglake_v1.types.Catalog): + Required. The catalog to create. The ``name`` field does + not need to be provided. + + This corresponds to the ``catalog`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + catalog_id (str): + Required. The ID to use for the + catalog, which will become the final + component of the catalog's resource + name. + + This corresponds to the ``catalog_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, catalog, catalog_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateCatalogRequest): + request = metastore.CreateCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if catalog is not None: + request.catalog = catalog + if catalog_id is not None: + request.catalog_id = catalog_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_catalog] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_catalog( + self, + request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Deletes an existing catalog specified by the catalog + ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_delete_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.delete_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.DeleteCatalogRequest, dict]): + The request object. Request message for the DeleteCatalog + method. + name (str): + Required. The name of the catalog to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteCatalogRequest): + request = metastore.DeleteCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_catalog] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_catalog( + self, + request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Gets the catalog specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_get_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.get_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.GetCatalogRequest, dict]): + The request object. Request message for the GetCatalog + method. + name (str): + Required. The name of the catalog to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetCatalogRequest): + request = metastore.GetCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_catalog] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_catalogs( + self, + request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCatalogsPager: + r"""List all catalogs in a specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_list_catalogs(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.ListCatalogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_catalogs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.ListCatalogsRequest, dict]): + The request object. Request message for the ListCatalogs + method. + parent (str): + Required. The parent, which owns this collection of + catalogs. Format: + projects/{project_id_or_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListCatalogsPager: + Response message for the ListCatalogs + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListCatalogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListCatalogsRequest): + request = metastore.ListCatalogsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_catalogs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCatalogsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
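+        # The pager also exposes page-level iteration; a sketch:
+        #
+        #     for page in response.pages:
+        #         for catalog in page.catalogs:
+        #             print(catalog.name)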
+ return response + + def create_database( + self, + request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[metastore.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Creates a new database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_create_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.CreateDatabaseRequest, dict]): + The request object. Request message for the + CreateDatabase method. + parent (str): + Required. The parent resource where this database will + be created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (google.cloud.bigquery.biglake_v1.types.Database): + Required. The database to create. The ``name`` field + does not need to be provided. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (str): + Required. The ID to use for the + database, which will become the final + component of the database's resource + name. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, database, database_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateDatabaseRequest): + request = metastore.CreateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
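+        # A sketch of the flattened call style (identifiers are placeholders):
+        #
+        #     db = client.create_database(
+        #         parent=catalog_path,
+        #         database=biglake_v1.Database(),
+        #         database_id="my_db",
+        #     )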
+ if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_database( + self, + request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Deletes an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_delete_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.delete_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.DeleteDatabaseRequest, dict]): + The request object. Request message for the + DeleteDatabase method. + name (str): + Required. The name of the database to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteDatabaseRequest): + request = metastore.DeleteDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_database( + self, + request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[metastore.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Updates an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_update_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateDatabaseRequest( + ) + + # Make the request + response = client.update_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.UpdateDatabaseRequest, dict]): + The request object. Request message for the + UpdateDatabase method. + database (google.cloud.bigquery.biglake_v1.types.Database): + Required. The database to update. + + The database's ``name`` field is used to identify the + database to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are + allowed to update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
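+        # A sketch of a partial update via an explicit field mask
+        # ("hive_options" is an illustrative field name):
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     mask = field_mask_pb2.FieldMask(paths=["hive_options"])
+        #     client.update_database(database=db, update_mask=mask)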
+ has_flattened_params = any([database, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.UpdateDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.UpdateDatabaseRequest): + request = metastore.UpdateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("database.name", request.database.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_database( + self, + request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Gets the database specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_get_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.GetDatabaseRequest, dict]): + The request object. Request message for the GetDatabase + method. + name (str): + Required. The name of the database to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetDatabaseRequest): + request = metastore.GetDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_databases( + self, + request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabasesPager: + r"""List all databases in a specified catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_list_databases(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.ListDatabasesRequest, dict]): + The request object. Request message for the ListDatabases + method. + parent (str): + Required. The parent, which owns this collection of + databases. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListDatabasesPager: + Response message for the + ListDatabases method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
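+        # (Illustrative note) A plain dict works here too; these are equivalent:
+        #   client.list_databases(request={"parent": parent})
+        #   client.list_databases(parent=parent)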
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListDatabasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListDatabasesRequest): + request = metastore.ListDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabasesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_table( + self, + request: Optional[Union[metastore.CreateTableRequest, dict]] = None, + *, + parent: Optional[str] = None, + table: Optional[metastore.Table] = None, + table_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Creates a new table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_create_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = client.create_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.CreateTableRequest, dict]): + The request object. Request message for the CreateTable + method. + parent (str): + Required. The parent resource where this table will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + table (google.cloud.bigquery.biglake_v1.types.Table): + Required. The table to create. The ``name`` field does + not need to be provided for the table creation. 
+ + This corresponds to the ``table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + table_id (str): + Required. The ID to use for the + table, which will become the final + component of the table's resource name. + + This corresponds to the ``table_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, table, table_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateTableRequest): + request = metastore.CreateTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if table is not None: + request.table = table + if table_id is not None: + request.table_id = table_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_table( + self, + request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Deletes an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_delete_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = client.delete_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.DeleteTableRequest, dict]): + The request object. 
Request message for the DeleteTable + method. + name (str): + Required. The name of the table to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteTableRequest): + request = metastore.DeleteTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_table( + self, + request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, + *, + table: Optional[metastore.Table] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Updates an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_update_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateTableRequest( + ) + + # Make the request + response = client.update_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.UpdateTableRequest, dict]): + The request object. Request message for the UpdateTable + method. + table (google.cloud.bigquery.biglake_v1.types.Table): + Required. The table to update. 
+
+                The table's ``name`` field is used to identify the table
+                to update. Format:
+                projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+
+                This corresponds to the ``table`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                The list of fields to update.
+
+                For the ``FieldMask`` definition, see
+                https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+                If not set, defaults to all of the fields that are
+                allowed to be updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.biglake_v1.types.Table:
+                Represents a table.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([table, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a metastore.UpdateTableRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, metastore.UpdateTableRequest):
+            request = metastore.UpdateTableRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if table is not None:
+            request.table = table
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_table]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("table.name", request.table.name),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def rename_table(
+        self,
+        request: Optional[Union[metastore.RenameTableRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        new_name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> metastore.Table:
+        r"""Renames an existing table specified by the table ID.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud.bigquery import biglake_v1
+
+            def sample_rename_table():
+                # Create a client
+                client = biglake_v1.MetastoreServiceClient()
+
+                # Initialize request argument(s)
+                request = biglake_v1.RenameTableRequest(
+                    name="name_value",
+                    new_name="new_name_value",
+                )
+
+                # Make the request
+                response = client.rename_table(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.bigquery.biglake_v1.types.RenameTableRequest, dict]):
+                The request object. Request message for the RenameTable
+                method in MetastoreService.
+            name (str):
+                Required. The table's ``name`` field is used to identify
+                the table to rename. Format:
+                projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            new_name (str):
+                Required. The new ``name`` for the specified table; it must
+                be in the same database. Format:
+                projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+
+                This corresponds to the ``new_name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.biglake_v1.types.Table:
+                Represents a table.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, new_name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a metastore.RenameTableRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, metastore.RenameTableRequest):
+            request = metastore.RenameTableRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if new_name is not None:
+            request.new_name = new_name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.rename_table]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
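+        # (Illustrative note) The returned Table reflects the post-rename state,
+        # so ``response.name`` carries the new resource name.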
+ return response + + def get_table( + self, + request: Optional[Union[metastore.GetTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Gets the table specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_get_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = client.get_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.GetTableRequest, dict]): + The request object. Request message for the GetTable + method. + name (str): + Required. The name of the table to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetTableRequest): + request = metastore.GetTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_tables( + self, + request: Optional[Union[metastore.ListTablesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTablesPager: + r"""List all tables in a specified database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1 + + def sample_list_tables(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.ListTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1.types.ListTablesRequest, dict]): + The request object. Request message for the ListTables + method. + parent (str): + Required. The parent, which owns this collection of + tables. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListTablesPager: + Response message for the ListTables + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListTablesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListTablesRequest): + request = metastore.ListTablesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tables] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
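+        # (Illustrative note) Only the first page is fetched here; the
+        # ListTablesPager built below pulls later pages lazily during iteration.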
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTablesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MetastoreServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MetastoreServiceClient",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/pagers.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/pagers.py new file mode 100644 index 000000000000..345e98d3bea6 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/pagers.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.bigquery.biglake_v1.types import metastore + + +class ListCatalogsPager: + """A pager for iterating through ``list_catalogs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1.types.ListCatalogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``catalogs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCatalogs`` requests and continue to iterate + through the ``catalogs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1.types.ListCatalogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListCatalogsResponse], + request: metastore.ListCatalogsRequest, + response: metastore.ListCatalogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1.types.ListCatalogsRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1.types.ListCatalogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
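+
+        (Illustrative note) Pagers are normally constructed by
+        ``MetastoreServiceClient.list_catalogs``; most callers only
+        iterate the returned pager.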
+ """ + self._method = method + self._request = metastore.ListCatalogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metastore.ListCatalogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metastore.Catalog]: + for page in self.pages: + yield from page.catalogs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCatalogsAsyncPager: + """A pager for iterating through ``list_catalogs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1.types.ListCatalogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``catalogs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCatalogs`` requests and continue to iterate + through the ``catalogs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1.types.ListCatalogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListCatalogsResponse]], + request: metastore.ListCatalogsRequest, + response: metastore.ListCatalogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1.types.ListCatalogsRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1.types.ListCatalogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListCatalogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metastore.ListCatalogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[metastore.Catalog]: + async def async_generator(): + async for page in self.pages: + for response in page.catalogs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatabasesPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1.types.ListDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.bigquery.biglake_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListDatabasesResponse], + request: metastore.ListDatabasesRequest, + response: metastore.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metastore.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metastore.Database]: + for page in self.pages: + yield from page.databases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatabasesAsyncPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1.types.ListDatabasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListDatabasesResponse]], + request: metastore.ListDatabasesRequest, + response: metastore.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
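+
+        (Illustrative note) This async pager is returned by the async
+        client's ``list_databases`` method and is consumed with
+        ``async for``.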
+ """ + self._method = method + self._request = metastore.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metastore.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[metastore.Database]: + async def async_generator(): + async for page in self.pages: + for response in page.databases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTablesPager: + """A pager for iterating through ``list_tables`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1.types.ListTablesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tables`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTables`` requests and continue to iterate + through the ``tables`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1.types.ListTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListTablesResponse], + request: metastore.ListTablesRequest, + response: metastore.ListTablesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1.types.ListTablesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1.types.ListTablesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListTablesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metastore.ListTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metastore.Table]: + for page in self.pages: + yield from page.tables + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTablesAsyncPager: + """A pager for iterating through ``list_tables`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1.types.ListTablesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tables`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTables`` requests and continue to iterate + through the ``tables`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.bigquery.biglake_v1.types.ListTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListTablesResponse]], + request: metastore.ListTablesRequest, + response: metastore.ListTablesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1.types.ListTablesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1.types.ListTablesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListTablesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metastore.ListTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[metastore.Table]: + async def async_generator(): + async for page in self.pages: + for response in page.tables: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/__init__.py new file mode 100644 index 000000000000..239db2ae2de3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetastoreServiceTransport +from .grpc import MetastoreServiceGrpcTransport +from .grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport +from .rest import MetastoreServiceRestInterceptor, MetastoreServiceRestTransport + +# Compile a registry of transports. 
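+# (Illustrative note) The client maps its ``transport`` argument onto this
+# registry, e.g. MetastoreServiceClient(transport="rest") selects the REST
+# transport below.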
+_transport_registry = OrderedDict() # type: Dict[str, Type[MetastoreServiceTransport]] +_transport_registry["grpc"] = MetastoreServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MetastoreServiceGrpcAsyncIOTransport +_transport_registry["rest"] = MetastoreServiceRestTransport + +__all__ = ( + "MetastoreServiceTransport", + "MetastoreServiceGrpcTransport", + "MetastoreServiceGrpcAsyncIOTransport", + "MetastoreServiceRestTransport", + "MetastoreServiceRestInterceptor", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/base.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/base.py new file mode 100644 index 000000000000..77e7d2bb2239 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/base.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery.biglake_v1 import gapic_version as package_version +from google.cloud.bigquery.biglake_v1.types import metastore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MetastoreServiceTransport(abc.ABC): + """Abstract transport class for MetastoreService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "biglake.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
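+        # (Illustrative note) wrap_method layers retry/timeout handling and
+        # client_info (user-agent) metadata over each RPC; the generated
+        # defaults below set no retry policy and no default timeout.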
+ self._wrapped_methods = { + self.create_catalog: gapic_v1.method.wrap_method( + self.create_catalog, + default_timeout=None, + client_info=client_info, + ), + self.delete_catalog: gapic_v1.method.wrap_method( + self.delete_catalog, + default_timeout=None, + client_info=client_info, + ), + self.get_catalog: gapic_v1.method.wrap_method( + self.get_catalog, + default_timeout=None, + client_info=client_info, + ), + self.list_catalogs: gapic_v1.method.wrap_method( + self.list_catalogs, + default_timeout=None, + client_info=client_info, + ), + self.create_database: gapic_v1.method.wrap_method( + self.create_database, + default_timeout=None, + client_info=client_info, + ), + self.delete_database: gapic_v1.method.wrap_method( + self.delete_database, + default_timeout=None, + client_info=client_info, + ), + self.update_database: gapic_v1.method.wrap_method( + self.update_database, + default_timeout=None, + client_info=client_info, + ), + self.get_database: gapic_v1.method.wrap_method( + self.get_database, + default_timeout=None, + client_info=client_info, + ), + self.list_databases: gapic_v1.method.wrap_method( + self.list_databases, + default_timeout=None, + client_info=client_info, + ), + self.create_table: gapic_v1.method.wrap_method( + self.create_table, + default_timeout=None, + client_info=client_info, + ), + self.delete_table: gapic_v1.method.wrap_method( + self.delete_table, + default_timeout=None, + client_info=client_info, + ), + self.update_table: gapic_v1.method.wrap_method( + self.update_table, + default_timeout=None, + client_info=client_info, + ), + self.rename_table: gapic_v1.method.wrap_method( + self.rename_table, + default_timeout=None, + client_info=client_info, + ), + self.get_table: gapic_v1.method.wrap_method( + self.get_table, + default_timeout=None, + client_info=client_info, + ), + self.list_tables: gapic_v1.method.wrap_method( + self.list_tables, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_catalog( + self, + ) -> Callable[ + [metastore.CreateCatalogRequest], + Union[metastore.Catalog, Awaitable[metastore.Catalog]], + ]: + raise NotImplementedError() + + @property + def delete_catalog( + self, + ) -> Callable[ + [metastore.DeleteCatalogRequest], + Union[metastore.Catalog, Awaitable[metastore.Catalog]], + ]: + raise NotImplementedError() + + @property + def get_catalog( + self, + ) -> Callable[ + [metastore.GetCatalogRequest], + Union[metastore.Catalog, Awaitable[metastore.Catalog]], + ]: + raise NotImplementedError() + + @property + def list_catalogs( + self, + ) -> Callable[ + [metastore.ListCatalogsRequest], + Union[ + metastore.ListCatalogsResponse, Awaitable[metastore.ListCatalogsResponse] + ], + ]: + raise NotImplementedError() + + @property + def create_database( + self, + ) -> Callable[ + [metastore.CreateDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def delete_database( + self, + ) -> Callable[ + [metastore.DeleteDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def update_database( + self, + ) -> Callable[ + [metastore.UpdateDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def get_database( + self, + ) -> Callable[ + [metastore.GetDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def list_databases( + self, + ) -> Callable[ + [metastore.ListDatabasesRequest], + Union[ + metastore.ListDatabasesResponse, Awaitable[metastore.ListDatabasesResponse] + ], + ]: + raise NotImplementedError() + + @property + def create_table( + self, + ) -> Callable[ + [metastore.CreateTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def delete_table( + self, + ) -> Callable[ + [metastore.DeleteTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def update_table( + self, + ) -> Callable[ + [metastore.UpdateTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def rename_table( + self, + ) -> Callable[ + [metastore.RenameTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def get_table( + self, + ) -> Callable[ + [metastore.GetTableRequest], Union[metastore.Table, Awaitable[metastore.Table]] + ]: + raise NotImplementedError() + + @property + def list_tables( + self, + ) -> Callable[ + [metastore.ListTablesRequest], + Union[metastore.ListTablesResponse, Awaitable[metastore.ListTablesResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MetastoreServiceTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/grpc.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/grpc.py new file mode 100644 index 000000000000..77dbff082c77 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/grpc.py @@ -0,0 +1,630 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.bigquery.biglake_v1.types import metastore + +from .base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport + + +class MetastoreServiceGrpcTransport(MetastoreServiceTransport): + """gRPC backend transport for MetastoreService. + + BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "biglake.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "biglake.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def create_catalog(
+        self,
+    ) -> Callable[[metastore.CreateCatalogRequest], metastore.Catalog]:
+        r"""Return a callable for the create catalog method over gRPC.
+
+        Creates a new catalog.
+
+        Returns:
+            Callable[[~.CreateCatalogRequest],
+                    ~.Catalog]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_catalog" not in self._stubs:
+            self._stubs["create_catalog"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.biglake.v1.MetastoreService/CreateCatalog",
+                request_serializer=metastore.CreateCatalogRequest.serialize,
+                response_deserializer=metastore.Catalog.deserialize,
+            )
+        return self._stubs["create_catalog"]
+
+    @property
+    def delete_catalog(
+        self,
+    ) -> Callable[[metastore.DeleteCatalogRequest], metastore.Catalog]:
+        r"""Return a callable for the delete catalog method over gRPC.
+
+        Deletes an existing catalog specified by the catalog
+        ID.
+
+        Returns:
+            Callable[[~.DeleteCatalogRequest],
+                    ~.Catalog]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_catalog" not in self._stubs: + self._stubs["delete_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteCatalog", + request_serializer=metastore.DeleteCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["delete_catalog"] + + @property + def get_catalog(self) -> Callable[[metastore.GetCatalogRequest], metastore.Catalog]: + r"""Return a callable for the get catalog method over gRPC. + + Gets the catalog specified by the resource name. + + Returns: + Callable[[~.GetCatalogRequest], + ~.Catalog]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_catalog" not in self._stubs: + self._stubs["get_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/GetCatalog", + request_serializer=metastore.GetCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["get_catalog"] + + @property + def list_catalogs( + self, + ) -> Callable[[metastore.ListCatalogsRequest], metastore.ListCatalogsResponse]: + r"""Return a callable for the list catalogs method over gRPC. + + List all catalogs in a specified project. + + Returns: + Callable[[~.ListCatalogsRequest], + ~.ListCatalogsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_catalogs" not in self._stubs: + self._stubs["list_catalogs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/ListCatalogs", + request_serializer=metastore.ListCatalogsRequest.serialize, + response_deserializer=metastore.ListCatalogsResponse.deserialize, + ) + return self._stubs["list_catalogs"] + + @property + def create_database( + self, + ) -> Callable[[metastore.CreateDatabaseRequest], metastore.Database]: + r"""Return a callable for the create database method over gRPC. + + Creates a new database. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/CreateDatabase", + request_serializer=metastore.CreateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["create_database"] + + @property + def delete_database( + self, + ) -> Callable[[metastore.DeleteDatabaseRequest], metastore.Database]: + r"""Return a callable for the delete database method over gRPC. + + Deletes an existing database specified by the + database ID. + + Returns: + Callable[[~.DeleteDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. 
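+
+        A minimal usage sketch (illustrative only; ``transport`` and
+        ``database_name`` are placeholders, and the request's ``name``
+        field is assumed from the resource model above)::
+
+            rpc = transport.delete_database
+            database = rpc(metastore.DeleteDatabaseRequest(name=database_name))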
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_database" not in self._stubs: + self._stubs["delete_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteDatabase", + request_serializer=metastore.DeleteDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["delete_database"] + + @property + def update_database( + self, + ) -> Callable[[metastore.UpdateDatabaseRequest], metastore.Database]: + r"""Return a callable for the update database method over gRPC. + + Updates an existing database specified by the + database ID. + + Returns: + Callable[[~.UpdateDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_database" not in self._stubs: + self._stubs["update_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateDatabase", + request_serializer=metastore.UpdateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["update_database"] + + @property + def get_database( + self, + ) -> Callable[[metastore.GetDatabaseRequest], metastore.Database]: + r"""Return a callable for the get database method over gRPC. + + Gets the database specified by the resource name. + + Returns: + Callable[[~.GetDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_database" not in self._stubs: + self._stubs["get_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/GetDatabase", + request_serializer=metastore.GetDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["get_database"] + + @property + def list_databases( + self, + ) -> Callable[[metastore.ListDatabasesRequest], metastore.ListDatabasesResponse]: + r"""Return a callable for the list databases method over gRPC. + + List all databases in a specified catalog. + + Returns: + Callable[[~.ListDatabasesRequest], + ~.ListDatabasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_databases" not in self._stubs: + self._stubs["list_databases"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/ListDatabases", + request_serializer=metastore.ListDatabasesRequest.serialize, + response_deserializer=metastore.ListDatabasesResponse.deserialize, + ) + return self._stubs["list_databases"] + + @property + def create_table(self) -> Callable[[metastore.CreateTableRequest], metastore.Table]: + r"""Return a callable for the create table method over gRPC. + + Creates a new table. 
+ + Returns: + Callable[[~.CreateTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_table" not in self._stubs: + self._stubs["create_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/CreateTable", + request_serializer=metastore.CreateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["create_table"] + + @property + def delete_table(self) -> Callable[[metastore.DeleteTableRequest], metastore.Table]: + r"""Return a callable for the delete table method over gRPC. + + Deletes an existing table specified by the table ID. + + Returns: + Callable[[~.DeleteTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_table" not in self._stubs: + self._stubs["delete_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteTable", + request_serializer=metastore.DeleteTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["delete_table"] + + @property + def update_table(self) -> Callable[[metastore.UpdateTableRequest], metastore.Table]: + r"""Return a callable for the update table method over gRPC. + + Updates an existing table specified by the table ID. + + Returns: + Callable[[~.UpdateTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_table" not in self._stubs: + self._stubs["update_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateTable", + request_serializer=metastore.UpdateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["update_table"] + + @property + def rename_table(self) -> Callable[[metastore.RenameTableRequest], metastore.Table]: + r"""Return a callable for the rename table method over gRPC. + + Renames an existing table specified by the table ID. + + Returns: + Callable[[~.RenameTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_table" not in self._stubs: + self._stubs["rename_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/RenameTable", + request_serializer=metastore.RenameTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["rename_table"] + + @property + def get_table(self) -> Callable[[metastore.GetTableRequest], metastore.Table]: + r"""Return a callable for the get table method over gRPC. + + Gets the table specified by the resource name. 
+ + Returns: + Callable[[~.GetTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_table" not in self._stubs: + self._stubs["get_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/GetTable", + request_serializer=metastore.GetTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["get_table"] + + @property + def list_tables( + self, + ) -> Callable[[metastore.ListTablesRequest], metastore.ListTablesResponse]: + r"""Return a callable for the list tables method over gRPC. + + List all tables in a specified database. + + Returns: + Callable[[~.ListTablesRequest], + ~.ListTablesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tables" not in self._stubs: + self._stubs["list_tables"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/ListTables", + request_serializer=metastore.ListTablesRequest.serialize, + response_deserializer=metastore.ListTablesResponse.deserialize, + ) + return self._stubs["list_tables"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MetastoreServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2ebeebed02d0 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/grpc_asyncio.py @@ -0,0 +1,647 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery.biglake_v1.types import metastore + +from .base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport +from .grpc import MetastoreServiceGrpcTransport + + +class MetastoreServiceGrpcAsyncIOTransport(MetastoreServiceTransport): + """gRPC AsyncIO backend transport for MetastoreService. 
+
+    BigLake Metastore is a serverless, highly available, multi-tenant
+    runtime metastore for Google Cloud Data Analytics products.
+
+    The BigLake Metastore API defines the following resource model:
+
+    - A collection of Google Cloud projects: ``/projects/*``
+    - Each project has a collection of available locations:
+      ``/locations/*``
+    - Each location has a collection of catalogs: ``/catalogs/*``
+    - Each catalog has a collection of databases: ``/databases/*``
+    - Each database has a collection of tables: ``/tables/*``
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "biglake.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "biglake.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_catalog( + self, + ) -> Callable[[metastore.CreateCatalogRequest], Awaitable[metastore.Catalog]]: + r"""Return a callable for the create catalog method over gRPC. + + Creates a new catalog. + + Returns: + Callable[[~.CreateCatalogRequest], + Awaitable[~.Catalog]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_catalog" not in self._stubs: + self._stubs["create_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/CreateCatalog", + request_serializer=metastore.CreateCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["create_catalog"] + + @property + def delete_catalog( + self, + ) -> Callable[[metastore.DeleteCatalogRequest], Awaitable[metastore.Catalog]]: + r"""Return a callable for the delete catalog method over gRPC. + + Deletes an existing catalog specified by the catalog + ID. + + Returns: + Callable[[~.DeleteCatalogRequest], + Awaitable[~.Catalog]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_catalog" not in self._stubs: + self._stubs["delete_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteCatalog", + request_serializer=metastore.DeleteCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["delete_catalog"] + + @property + def get_catalog( + self, + ) -> Callable[[metastore.GetCatalogRequest], Awaitable[metastore.Catalog]]: + r"""Return a callable for the get catalog method over gRPC. + + Gets the catalog specified by the resource name. + + Returns: + Callable[[~.GetCatalogRequest], + Awaitable[~.Catalog]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_catalog" not in self._stubs: + self._stubs["get_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/GetCatalog", + request_serializer=metastore.GetCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["get_catalog"] + + @property + def list_catalogs( + self, + ) -> Callable[ + [metastore.ListCatalogsRequest], Awaitable[metastore.ListCatalogsResponse] + ]: + r"""Return a callable for the list catalogs method over gRPC. + + List all catalogs in a specified project. + + Returns: + Callable[[~.ListCatalogsRequest], + Awaitable[~.ListCatalogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_catalogs" not in self._stubs: + self._stubs["list_catalogs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/ListCatalogs", + request_serializer=metastore.ListCatalogsRequest.serialize, + response_deserializer=metastore.ListCatalogsResponse.deserialize, + ) + return self._stubs["list_catalogs"] + + @property + def create_database( + self, + ) -> Callable[[metastore.CreateDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the create database method over gRPC. + + Creates a new database. + + Returns: + Callable[[~.CreateDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/CreateDatabase", + request_serializer=metastore.CreateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["create_database"] + + @property + def delete_database( + self, + ) -> Callable[[metastore.DeleteDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the delete database method over gRPC. + + Deletes an existing database specified by the + database ID. + + Returns: + Callable[[~.DeleteDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_database" not in self._stubs: + self._stubs["delete_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteDatabase", + request_serializer=metastore.DeleteDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["delete_database"] + + @property + def update_database( + self, + ) -> Callable[[metastore.UpdateDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the update database method over gRPC. + + Updates an existing database specified by the + database ID. + + Returns: + Callable[[~.UpdateDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_database" not in self._stubs: + self._stubs["update_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateDatabase", + request_serializer=metastore.UpdateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["update_database"] + + @property + def get_database( + self, + ) -> Callable[[metastore.GetDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the get database method over gRPC. + + Gets the database specified by the resource name. + + Returns: + Callable[[~.GetDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_database" not in self._stubs: + self._stubs["get_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/GetDatabase", + request_serializer=metastore.GetDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["get_database"] + + @property + def list_databases( + self, + ) -> Callable[ + [metastore.ListDatabasesRequest], Awaitable[metastore.ListDatabasesResponse] + ]: + r"""Return a callable for the list databases method over gRPC. + + List all databases in a specified catalog. + + Returns: + Callable[[~.ListDatabasesRequest], + Awaitable[~.ListDatabasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_databases" not in self._stubs: + self._stubs["list_databases"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/ListDatabases", + request_serializer=metastore.ListDatabasesRequest.serialize, + response_deserializer=metastore.ListDatabasesResponse.deserialize, + ) + return self._stubs["list_databases"] + + @property + def create_table( + self, + ) -> Callable[[metastore.CreateTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the create table method over gRPC. + + Creates a new table. + + Returns: + Callable[[~.CreateTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_table" not in self._stubs: + self._stubs["create_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/CreateTable", + request_serializer=metastore.CreateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["create_table"] + + @property + def delete_table( + self, + ) -> Callable[[metastore.DeleteTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the delete table method over gRPC. + + Deletes an existing table specified by the table ID. + + Returns: + Callable[[~.DeleteTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_table" not in self._stubs: + self._stubs["delete_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteTable", + request_serializer=metastore.DeleteTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["delete_table"] + + @property + def update_table( + self, + ) -> Callable[[metastore.UpdateTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the update table method over gRPC. + + Updates an existing table specified by the table ID. + + Returns: + Callable[[~.UpdateTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_table" not in self._stubs: + self._stubs["update_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateTable", + request_serializer=metastore.UpdateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["update_table"] + + @property + def rename_table( + self, + ) -> Callable[[metastore.RenameTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the rename table method over gRPC. + + Renames an existing table specified by the table ID. + + Returns: + Callable[[~.RenameTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_table" not in self._stubs: + self._stubs["rename_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/RenameTable", + request_serializer=metastore.RenameTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["rename_table"] + + @property + def get_table( + self, + ) -> Callable[[metastore.GetTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the get table method over gRPC. + + Gets the table specified by the resource name. + + Returns: + Callable[[~.GetTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_table" not in self._stubs: + self._stubs["get_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/GetTable", + request_serializer=metastore.GetTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["get_table"] + + @property + def list_tables( + self, + ) -> Callable[ + [metastore.ListTablesRequest], Awaitable[metastore.ListTablesResponse] + ]: + r"""Return a callable for the list tables method over gRPC. + + List all tables in a specified database. + + Returns: + Callable[[~.ListTablesRequest], + Awaitable[~.ListTablesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tables" not in self._stubs: + self._stubs["list_tables"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1.MetastoreService/ListTables", + request_serializer=metastore.ListTablesRequest.serialize, + response_deserializer=metastore.ListTablesResponse.deserialize, + ) + return self._stubs["list_tables"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MetastoreServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/rest.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/rest.py new file mode 100644 index 000000000000..9ed48127de89 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/services/metastore_service/transports/rest.py @@ -0,0 +1,2097 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import dataclasses
+import json  # type: ignore
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+from google.protobuf import json_format
+import grpc  # type: ignore
+from requests import __version__ as requests_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+
+from google.cloud.bigquery.biglake_v1.types import metastore
+
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+from .base import MetastoreServiceTransport
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=requests_version,
+)
+
+
+class MetastoreServiceRestInterceptor:
+    """Interceptor for MetastoreService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the MetastoreServiceRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomMetastoreServiceInterceptor(MetastoreServiceRestInterceptor):
+            def pre_create_catalog(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_catalog(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_database(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_database(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_table(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_table(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_catalog(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_catalog(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_database(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_database(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_table(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_table(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_catalog(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_catalog(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_database(self, request, metadata):
+                logging.info(f"Received request: {request}")
{request}") + return request, metadata + + def post_get_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_catalogs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_catalogs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_databases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tables(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tables(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rename_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rename_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_table(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MetastoreServiceRestTransport(interceptor=MyCustomMetastoreServiceInterceptor()) + client = MetastoreServiceClient(transport=transport) + + + """ + + def pre_create_catalog( + self, + request: metastore.CreateCatalogRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.CreateCatalogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_catalog + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_create_catalog(self, response: metastore.Catalog) -> metastore.Catalog: + """Post-rpc interceptor for create_catalog + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_create_database( + self, + request: metastore.CreateDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.CreateDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_create_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for create_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. 
+ """ + return response + + def pre_create_table( + self, request: metastore.CreateTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.CreateTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_create_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for create_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_catalog( + self, + request: metastore.DeleteCatalogRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.DeleteCatalogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_catalog + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_delete_catalog(self, response: metastore.Catalog) -> metastore.Catalog: + """Post-rpc interceptor for delete_catalog + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_database( + self, + request: metastore.DeleteDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.DeleteDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_delete_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for delete_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_table( + self, request: metastore.DeleteTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.DeleteTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_delete_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for delete_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_get_catalog( + self, request: metastore.GetCatalogRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.GetCatalogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_catalog + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_get_catalog(self, response: metastore.Catalog) -> metastore.Catalog: + """Post-rpc interceptor for get_catalog + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_database( + self, request: metastore.GetDatabaseRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.GetDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_get_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for get_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_get_table( + self, request: metastore.GetTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.GetTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_get_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for get_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_list_catalogs( + self, + request: metastore.ListCatalogsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.ListCatalogsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_catalogs + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_list_catalogs( + self, response: metastore.ListCatalogsResponse + ) -> metastore.ListCatalogsResponse: + """Post-rpc interceptor for list_catalogs + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_list_databases( + self, + request: metastore.ListDatabasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.ListDatabasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_list_databases( + self, response: metastore.ListDatabasesResponse + ) -> metastore.ListDatabasesResponse: + """Post-rpc interceptor for list_databases + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_list_tables( + self, request: metastore.ListTablesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.ListTablesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tables + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_list_tables( + self, response: metastore.ListTablesResponse + ) -> metastore.ListTablesResponse: + """Post-rpc interceptor for list_tables + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. 
+ """ + return response + + def pre_rename_table( + self, request: metastore.RenameTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.RenameTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rename_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_rename_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for rename_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_update_database( + self, + request: metastore.UpdateDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.UpdateDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_update_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for update_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_update_table( + self, request: metastore.UpdateTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.UpdateTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_update_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for update_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MetastoreServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MetastoreServiceRestInterceptor + + +class MetastoreServiceRestTransport(MetastoreServiceTransport): + """REST backend transport for MetastoreService. + + BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "biglake.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[MetastoreServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or MetastoreServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateCatalog(MetastoreServiceRestStub):
+        def __hash__(self):
+            return hash("CreateCatalog")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "catalogId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: metastore.CreateCatalogRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> metastore.Catalog:
+            r"""Call the create catalog method over HTTP.
+
+            Args:
+                request (~.metastore.CreateCatalogRequest):
+                    The request object. Request message for the CreateCatalog
+                    method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.metastore.Catalog:
+                    Catalog is the container of
+                databases.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*}/catalogs",
+                    "body": "catalog",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_catalog(request, metadata)
+            pb_request = metastore.CreateCatalogRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Catalog() + pb_resp = metastore.Catalog.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_catalog(resp) + return resp + + class _CreateDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("CreateDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "databaseId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.CreateDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the create database method over HTTP. + + Args: + request (~.metastore.CreateDatabaseRequest): + The request object. Request message for the + CreateDatabase method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/catalogs/*}/databases", + "body": "database", + }, + ] + request, metadata = self._interceptor.pre_create_database(request, metadata) + pb_request = metastore.CreateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_database(resp) + return resp + + class _CreateTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("CreateTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "tableId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.CreateTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the create table method over HTTP. + + Args: + request (~.metastore.CreateTableRequest): + The request object. Request message for the CreateTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables", + "body": "table", + }, + ] + request, metadata = self._interceptor.pre_create_table(request, metadata) + pb_request = metastore.CreateTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_table(resp) + return resp + + class _DeleteCatalog(MetastoreServiceRestStub): + def __hash__(self): + return hash("DeleteCatalog") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.DeleteCatalogRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Call the delete catalog method over HTTP. + + Args: + request (~.metastore.DeleteCatalogRequest): + The request object. Request message for the DeleteCatalog + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Catalog: + Catalog is the container of + databases. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/catalogs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_catalog(request, metadata) + pb_request = metastore.DeleteCatalogRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Catalog() + pb_resp = metastore.Catalog.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_catalog(resp) + return resp + + class _DeleteDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("DeleteDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.DeleteDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the delete database method over HTTP. 
+ + Args: + request (~.metastore.DeleteDatabaseRequest): + The request object. Request message for the + DeleteDatabase method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/catalogs/*/databases/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_database(request, metadata) + pb_request = metastore.DeleteDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_database(resp) + return resp + + class _DeleteTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("DeleteTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.DeleteTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the delete table method over HTTP. + + Args: + request (~.metastore.DeleteTableRequest): + The request object. Request message for the DeleteTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_table(request, metadata) + pb_request = metastore.DeleteTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_table(resp) + return resp + + class _GetCatalog(MetastoreServiceRestStub): + def __hash__(self): + return hash("GetCatalog") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.GetCatalogRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Call the get catalog method over HTTP. + + Args: + request (~.metastore.GetCatalogRequest): + The request object. Request message for the GetCatalog + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Catalog: + Catalog is the container of + databases. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/catalogs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_catalog(request, metadata) + pb_request = metastore.GetCatalogRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Catalog() + pb_resp = metastore.Catalog.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_catalog(resp) + return resp + + class _GetDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("GetDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.GetDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the get database method over HTTP. + + Args: + request (~.metastore.GetDatabaseRequest): + The request object. Request message for the GetDatabase + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/catalogs/*/databases/*}", + }, + ] + request, metadata = self._interceptor.pre_get_database(request, metadata) + pb_request = metastore.GetDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_database(resp) + return resp + + class _GetTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("GetTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.GetTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the get table method over HTTP. + + Args: + request (~.metastore.GetTableRequest): + The request object. Request message for the GetTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + }, + ] + request, metadata = self._interceptor.pre_get_table(request, metadata) + pb_request = metastore.GetTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_table(resp) + return resp + + class _ListCatalogs(MetastoreServiceRestStub): + def __hash__(self): + return hash("ListCatalogs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.ListCatalogsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.ListCatalogsResponse: + r"""Call the list catalogs method over HTTP. + + Args: + request (~.metastore.ListCatalogsRequest): + The request object. Request message for the ListCatalogs + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.ListCatalogsResponse: + Response message for the ListCatalogs + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/catalogs", + }, + ] + request, metadata = self._interceptor.pre_list_catalogs(request, metadata) + pb_request = metastore.ListCatalogsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.ListCatalogsResponse() + pb_resp = metastore.ListCatalogsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_catalogs(resp) + return resp + + class _ListDatabases(MetastoreServiceRestStub): + def __hash__(self): + return hash("ListDatabases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.ListDatabasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.ListDatabasesResponse: + r"""Call the list databases method over HTTP. + + Args: + request (~.metastore.ListDatabasesRequest): + The request object. Request message for the ListDatabases + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.ListDatabasesResponse: + Response message for the + ListDatabases method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/catalogs/*}/databases", + }, + ] + request, metadata = self._interceptor.pre_list_databases(request, metadata) + pb_request = metastore.ListDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.ListDatabasesResponse() + pb_resp = metastore.ListDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_databases(resp) + return resp + + class _ListTables(MetastoreServiceRestStub): + def __hash__(self): + return hash("ListTables") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.ListTablesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.ListTablesResponse: + r"""Call the list tables method over HTTP. + + Args: + request (~.metastore.ListTablesRequest): + The request object. Request message for the ListTables + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.ListTablesResponse: + Response message for the ListTables + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables", + }, + ] + request, metadata = self._interceptor.pre_list_tables(request, metadata) + pb_request = metastore.ListTablesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.ListTablesResponse() + pb_resp = metastore.ListTablesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tables(resp) + return resp + + class _RenameTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("RenameTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.RenameTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the rename table method over HTTP. + + Args: + request (~.metastore.RenameTableRequest): + The request object. Request message for the RenameTable + method in MetastoreService + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. 
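+
+            A hedged sketch of the request this stub expects, assuming the
+            v1 ``RenameTableRequest`` shape where ``name`` is the existing
+            table and ``new_name`` the target resource name (both paths
+            below are placeholders)::
+
+                request = metastore.RenameTableRequest(
+                    name="projects/p/locations/l/catalogs/c/databases/d/tables/t1",
+                    new_name="projects/p/locations/l/catalogs/c/databases/d/tables/t2",
+                )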
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_rename_table(request, metadata) + pb_request = metastore.RenameTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rename_table(resp) + return resp + + class _UpdateDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("UpdateDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.UpdateDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the update database method over HTTP. + + Args: + request (~.metastore.UpdateDatabaseRequest): + The request object. Request message for the + UpdateDatabase method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{database.name=projects/*/locations/*/catalogs/*/databases/*}", + "body": "database", + }, + ] + request, metadata = self._interceptor.pre_update_database(request, metadata) + pb_request = metastore.UpdateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_database(resp) + return resp + + class _UpdateTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("UpdateTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.UpdateTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the update table method over HTTP. + + Args: + request (~.metastore.UpdateTableRequest): + The request object. Request message for the UpdateTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + "body": "table", + }, + ] + request, metadata = self._interceptor.pre_update_table(request, metadata) + pb_request = metastore.UpdateTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_table(resp) + return resp + + @property + def create_catalog( + self, + ) -> Callable[[metastore.CreateCatalogRequest], metastore.Catalog]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCatalog(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_database( + self, + ) -> Callable[[metastore.CreateDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_table(self) -> Callable[[metastore.CreateTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_catalog( + self, + ) -> Callable[[metastore.DeleteCatalogRequest], metastore.Catalog]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCatalog(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_database( + self, + ) -> Callable[[metastore.DeleteDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_table(self) -> Callable[[metastore.DeleteTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_catalog(self) -> Callable[[metastore.GetCatalogRequest], metastore.Catalog]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCatalog(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_database( + self, + ) -> Callable[[metastore.GetDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_table(self) -> Callable[[metastore.GetTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_catalogs( + self, + ) -> Callable[[metastore.ListCatalogsRequest], metastore.ListCatalogsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCatalogs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_databases( + self, + ) -> Callable[[metastore.ListDatabasesRequest], metastore.ListDatabasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tables( + self, + ) -> Callable[[metastore.ListTablesRequest], metastore.ListTablesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTables(self._session, self._host, self._interceptor) # type: ignore + + @property + def rename_table(self) -> Callable[[metastore.RenameTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RenameTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_database( + self, + ) -> Callable[[metastore.UpdateDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_table(self) -> Callable[[metastore.UpdateTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MetastoreServiceRestTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/types/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/types/__init__.py new file mode 100644 index 000000000000..9f7fea94dd65 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/types/__init__.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .metastore import ( + Catalog, + CreateCatalogRequest, + CreateDatabaseRequest, + CreateTableRequest, + Database, + DeleteCatalogRequest, + DeleteDatabaseRequest, + DeleteTableRequest, + GetCatalogRequest, + GetDatabaseRequest, + GetTableRequest, + HiveDatabaseOptions, + HiveTableOptions, + ListCatalogsRequest, + ListCatalogsResponse, + ListDatabasesRequest, + ListDatabasesResponse, + ListTablesRequest, + ListTablesResponse, + RenameTableRequest, + Table, + TableView, + UpdateDatabaseRequest, + UpdateTableRequest, +) + +__all__ = ( + "Catalog", + "CreateCatalogRequest", + "CreateDatabaseRequest", + "CreateTableRequest", + "Database", + "DeleteCatalogRequest", + "DeleteDatabaseRequest", + "DeleteTableRequest", + "GetCatalogRequest", + "GetDatabaseRequest", + "GetTableRequest", + "HiveDatabaseOptions", + "HiveTableOptions", + "ListCatalogsRequest", + "ListCatalogsResponse", + "ListDatabasesRequest", + "ListDatabasesResponse", + "ListTablesRequest", + "ListTablesResponse", + "RenameTableRequest", + "Table", + "UpdateDatabaseRequest", + "UpdateTableRequest", + "TableView", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/types/metastore.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/types/metastore.py new file mode 100644 index 000000000000..8aedeca9da0c --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1/types/metastore.py @@ -0,0 +1,866 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.biglake.v1", + manifest={ + "TableView", + "Catalog", + "Database", + "Table", + "CreateCatalogRequest", + "DeleteCatalogRequest", + "GetCatalogRequest", + "ListCatalogsRequest", + "ListCatalogsResponse", + "CreateDatabaseRequest", + "DeleteDatabaseRequest", + "UpdateDatabaseRequest", + "GetDatabaseRequest", + "ListDatabasesRequest", + "ListDatabasesResponse", + "CreateTableRequest", + "DeleteTableRequest", + "UpdateTableRequest", + "RenameTableRequest", + "GetTableRequest", + "ListTablesRequest", + "ListTablesResponse", + "HiveDatabaseOptions", + "HiveTableOptions", + }, +) + + +class TableView(proto.Enum): + r"""View on Table. Represents which fields will be populated for + calls that return Table objects. + + Values: + TABLE_VIEW_UNSPECIFIED (0): + Default value. The API will default to the + BASIC view. + BASIC (1): + Include only table names. + This is the default value. + FULL (2): + Include everything. + """ + TABLE_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class Catalog(proto.Message): + r"""Catalog is the container of databases. + + Attributes: + name (str): + Output only. The resource name. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the + catalog. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last modification time of + the catalog. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deletion time of the + catalog. Only set after the catalog is deleted. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this catalog is + considered expired. Only set after the catalog + is deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class Database(proto.Message): + r"""Database is the container of tables. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hive_options (google.cloud.bigquery.biglake_v1.types.HiveDatabaseOptions): + Options of a Hive database. + + This field is a member of `oneof`_ ``options``. + name (str): + Output only. The resource name. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the + database. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last modification time of + the database. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deletion time of the + database. Only set after the database is + deleted. 
+ expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this database is + considered expired. Only set after the database + is deleted. + type_ (google.cloud.bigquery.biglake_v1.types.Database.Type): + The database type. + """ + + class Type(proto.Enum): + r"""The database type. + + Values: + TYPE_UNSPECIFIED (0): + The type is not specified. + HIVE (1): + Represents a database storing tables + compatible with Hive Metastore tables. + """ + TYPE_UNSPECIFIED = 0 + HIVE = 1 + + hive_options: "HiveDatabaseOptions" = proto.Field( + proto.MESSAGE, + number=7, + oneof="options", + message="HiveDatabaseOptions", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + + +class Table(proto.Message): + r"""Represents a table. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hive_options (google.cloud.bigquery.biglake_v1.types.HiveTableOptions): + Options of a Hive table. + + This field is a member of `oneof`_ ``options``. + name (str): + Output only. The resource name. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the table. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last modification time of + the table. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deletion time of the table. + Only set after the table is deleted. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this table is + considered expired. Only set after the table is + deleted. + type_ (google.cloud.bigquery.biglake_v1.types.Table.Type): + The table type. + etag (str): + The checksum of a table object computed by + the server based on the value of other fields. + It may be sent on update requests to ensure the + client has an up-to-date value before + proceeding. It is only checked for update table + operations. + """ + + class Type(proto.Enum): + r"""The table type. + + Values: + TYPE_UNSPECIFIED (0): + The type is not specified. + HIVE (1): + Represents a table compatible with Hive + Metastore tables. 
+ """ + TYPE_UNSPECIFIED = 0 + HIVE = 1 + + hive_options: "HiveTableOptions" = proto.Field( + proto.MESSAGE, + number=7, + oneof="options", + message="HiveTableOptions", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + + +class CreateCatalogRequest(proto.Message): + r"""Request message for the CreateCatalog method. + + Attributes: + parent (str): + Required. The parent resource where this catalog will be + created. Format: + projects/{project_id_or_number}/locations/{location_id} + catalog (google.cloud.bigquery.biglake_v1.types.Catalog): + Required. The catalog to create. The ``name`` field does not + need to be provided. + catalog_id (str): + Required. The ID to use for the catalog, + which will become the final component of the + catalog's resource name. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + catalog: "Catalog" = proto.Field( + proto.MESSAGE, + number=2, + message="Catalog", + ) + catalog_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteCatalogRequest(proto.Message): + r"""Request message for the DeleteCatalog method. + + Attributes: + name (str): + Required. The name of the catalog to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetCatalogRequest(proto.Message): + r"""Request message for the GetCatalog method. + + Attributes: + name (str): + Required. The name of the catalog to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListCatalogsRequest(proto.Message): + r"""Request message for the ListCatalogs method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + catalogs. Format: + projects/{project_id_or_number}/locations/{location_id} + page_size (int): + The maximum number of catalogs to return. The + service may return fewer than this value. + If unspecified, at most 50 catalogs will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListCatalogs`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListCatalogs`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListCatalogsResponse(proto.Message): + r"""Response message for the ListCatalogs method. + + Attributes: + catalogs (MutableSequence[google.cloud.bigquery.biglake_v1.types.Catalog]): + The catalogs from the specified project. 
+ next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + catalogs: MutableSequence["Catalog"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Catalog", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDatabaseRequest(proto.Message): + r"""Request message for the CreateDatabase method. + + Attributes: + parent (str): + Required. The parent resource where this database will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + database (google.cloud.bigquery.biglake_v1.types.Database): + Required. The database to create. The ``name`` field does + not need to be provided. + database_id (str): + Required. The ID to use for the database, + which will become the final component of the + database's resource name. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database: "Database" = proto.Field( + proto.MESSAGE, + number=2, + message="Database", + ) + database_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteDatabaseRequest(proto.Message): + r"""Request message for the DeleteDatabase method. + + Attributes: + name (str): + Required. The name of the database to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDatabaseRequest(proto.Message): + r"""Request message for the UpdateDatabase method. + + Attributes: + database (google.cloud.bigquery.biglake_v1.types.Database): + Required. The database to update. + + The database's ``name`` field is used to identify the + database to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + database: "Database" = proto.Field( + proto.MESSAGE, + number=1, + message="Database", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetDatabaseRequest(proto.Message): + r"""Request message for the GetDatabase method. + + Attributes: + name (str): + Required. The name of the database to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDatabasesRequest(proto.Message): + r"""Request message for the ListDatabases method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + databases. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + page_size (int): + The maximum number of databases to return. + The service may return fewer than this value. If + unspecified, at most 50 databases will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListDatabases`` + call. Provide this to retrieve the subsequent page. 
+ + When paginating, all other parameters provided to + ``ListDatabases`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDatabasesResponse(proto.Message): + r"""Response message for the ListDatabases method. + + Attributes: + databases (MutableSequence[google.cloud.bigquery.biglake_v1.types.Database]): + The databases from the specified catalog. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + databases: MutableSequence["Database"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Database", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateTableRequest(proto.Message): + r"""Request message for the CreateTable method. + + Attributes: + parent (str): + Required. The parent resource where this table will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + table (google.cloud.bigquery.biglake_v1.types.Table): + Required. The table to create. The ``name`` field does not + need to be provided for the table creation. + table_id (str): + Required. The ID to use for the table, which + will become the final component of the table's + resource name. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + table: "Table" = proto.Field( + proto.MESSAGE, + number=2, + message="Table", + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteTableRequest(proto.Message): + r"""Request message for the DeleteTable method. + + Attributes: + name (str): + Required. The name of the table to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTableRequest(proto.Message): + r"""Request message for the UpdateTable method. + + Attributes: + table (google.cloud.bigquery.biglake_v1.types.Table): + Required. The table to update. + + The table's ``name`` field is used to identify the table to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + table: "Table" = proto.Field( + proto.MESSAGE, + number=1, + message="Table", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class RenameTableRequest(proto.Message): + r"""Request message for the RenameTable method in + MetastoreService + + Attributes: + name (str): + Required. The table's ``name`` field is used to identify the + table to rename. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + new_name (str): + Required. The new ``name`` for the specified table, must be + in the same database. 
Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTableRequest(proto.Message): + r"""Request message for the GetTable method. + + Attributes: + name (str): + Required. The name of the table to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTablesRequest(proto.Message): + r"""Request message for the ListTables method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of tables. + Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + page_size (int): + The maximum number of tables to return. The + service may return fewer than this value. + If unspecified, at most 50 tables will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListTables`` call. + Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListTables`` must match the call that provided the page + token. + view (google.cloud.bigquery.biglake_v1.types.TableView): + The view for the returned tables. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + view: "TableView" = proto.Field( + proto.ENUM, + number=4, + enum="TableView", + ) + + +class ListTablesResponse(proto.Message): + r"""Response message for the ListTables method. + + Attributes: + tables (MutableSequence[google.cloud.bigquery.biglake_v1.types.Table]): + The tables from the specified database. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + tables: MutableSequence["Table"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Table", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class HiveDatabaseOptions(proto.Message): + r"""Options of a Hive database. + + Attributes: + location_uri (str): + Cloud Storage folder URI where the database + data is stored, starting with "gs://". + parameters (MutableMapping[str, str]): + Stores user supplied Hive database + parameters. + """ + + location_uri: str = proto.Field( + proto.STRING, + number=1, + ) + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + +class HiveTableOptions(proto.Message): + r"""Options of a Hive table. + + Attributes: + parameters (MutableMapping[str, str]): + Stores user supplied Hive table parameters. + table_type (str): + Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE. + storage_descriptor (google.cloud.bigquery.biglake_v1.types.HiveTableOptions.StorageDescriptor): + Stores physical storage information of the + data. + """ + + class SerDeInfo(proto.Message): + r"""Serializer and deserializer information. + + Attributes: + serialization_lib (str): + The fully qualified Java class name of the + serialization library. 
+ """ + + serialization_lib: str = proto.Field( + proto.STRING, + number=1, + ) + + class StorageDescriptor(proto.Message): + r"""Stores physical storage information of the data. + + Attributes: + location_uri (str): + Cloud Storage folder URI where the table data + is stored, starting with "gs://". + input_format (str): + The fully qualified Java class name of the + input format. + output_format (str): + The fully qualified Java class name of the + output format. + serde_info (google.cloud.bigquery.biglake_v1.types.HiveTableOptions.SerDeInfo): + Serializer and deserializer information. + """ + + location_uri: str = proto.Field( + proto.STRING, + number=1, + ) + input_format: str = proto.Field( + proto.STRING, + number=2, + ) + output_format: str = proto.Field( + proto.STRING, + number=3, + ) + serde_info: "HiveTableOptions.SerDeInfo" = proto.Field( + proto.MESSAGE, + number=4, + message="HiveTableOptions.SerDeInfo", + ) + + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + table_type: str = proto.Field( + proto.STRING, + number=2, + ) + storage_descriptor: StorageDescriptor = proto.Field( + proto.MESSAGE, + number=3, + message=StorageDescriptor, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/__init__.py new file mode 100644 index 000000000000..cd606d2301e1 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/__init__.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.bigquery.biglake_v1alpha1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.metastore_service import ( + MetastoreServiceAsyncClient, + MetastoreServiceClient, +) +from .types.metastore import ( + Catalog, + CheckLockRequest, + CreateCatalogRequest, + CreateDatabaseRequest, + CreateLockRequest, + CreateTableRequest, + Database, + DeleteCatalogRequest, + DeleteDatabaseRequest, + DeleteLockRequest, + DeleteTableRequest, + GetCatalogRequest, + GetDatabaseRequest, + GetTableRequest, + HiveDatabaseOptions, + HiveTableOptions, + ListCatalogsRequest, + ListCatalogsResponse, + ListDatabasesRequest, + ListDatabasesResponse, + ListLocksRequest, + ListLocksResponse, + ListTablesRequest, + ListTablesResponse, + Lock, + RenameTableRequest, + Table, + TableView, + UpdateDatabaseRequest, + UpdateTableRequest, +) + +__all__ = ( + "MetastoreServiceAsyncClient", + "Catalog", + "CheckLockRequest", + "CreateCatalogRequest", + "CreateDatabaseRequest", + "CreateLockRequest", + "CreateTableRequest", + "Database", + "DeleteCatalogRequest", + "DeleteDatabaseRequest", + "DeleteLockRequest", + "DeleteTableRequest", + "GetCatalogRequest", + "GetDatabaseRequest", + "GetTableRequest", + "HiveDatabaseOptions", + "HiveTableOptions", + "ListCatalogsRequest", + "ListCatalogsResponse", + "ListDatabasesRequest", + "ListDatabasesResponse", + "ListLocksRequest", + "ListLocksResponse", + "ListTablesRequest", + "ListTablesResponse", + "Lock", + "MetastoreServiceClient", + "RenameTableRequest", + "Table", + "TableView", + "UpdateDatabaseRequest", + "UpdateTableRequest", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/gapic_metadata.json b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/gapic_metadata.json new file mode 100644 index 000000000000..3b31720b53bd --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/gapic_metadata.json @@ -0,0 +1,313 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery.biglake_v1alpha1", + "protoPackage": "google.cloud.bigquery.biglake.v1alpha1", + "schema": "1.0", + "services": { + "MetastoreService": { + "clients": { + "grpc": { + "libraryClient": "MetastoreServiceClient", + "rpcs": { + "CheckLock": { + "methods": [ + "check_lock" + ] + }, + "CreateCatalog": { + "methods": [ + "create_catalog" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateLock": { + "methods": [ + "create_lock" + ] + }, + "CreateTable": { + "methods": [ + "create_table" + ] + }, + "DeleteCatalog": { + "methods": [ + "delete_catalog" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteLock": { + "methods": [ + "delete_lock" + ] + }, + "DeleteTable": { + "methods": [ + "delete_table" + ] + }, + "GetCatalog": { + "methods": [ + "get_catalog" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "ListCatalogs": { + "methods": [ + "list_catalogs" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListLocks": { + "methods": [ + "list_locks" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "RenameTable": { + "methods": [ + "rename_table" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateTable": { + "methods": [ + 
"update_table" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetastoreServiceAsyncClient", + "rpcs": { + "CheckLock": { + "methods": [ + "check_lock" + ] + }, + "CreateCatalog": { + "methods": [ + "create_catalog" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateLock": { + "methods": [ + "create_lock" + ] + }, + "CreateTable": { + "methods": [ + "create_table" + ] + }, + "DeleteCatalog": { + "methods": [ + "delete_catalog" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteLock": { + "methods": [ + "delete_lock" + ] + }, + "DeleteTable": { + "methods": [ + "delete_table" + ] + }, + "GetCatalog": { + "methods": [ + "get_catalog" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "ListCatalogs": { + "methods": [ + "list_catalogs" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListLocks": { + "methods": [ + "list_locks" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "RenameTable": { + "methods": [ + "rename_table" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateTable": { + "methods": [ + "update_table" + ] + } + } + }, + "rest": { + "libraryClient": "MetastoreServiceClient", + "rpcs": { + "CheckLock": { + "methods": [ + "check_lock" + ] + }, + "CreateCatalog": { + "methods": [ + "create_catalog" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "CreateLock": { + "methods": [ + "create_lock" + ] + }, + "CreateTable": { + "methods": [ + "create_table" + ] + }, + "DeleteCatalog": { + "methods": [ + "delete_catalog" + ] + }, + "DeleteDatabase": { + "methods": [ + "delete_database" + ] + }, + "DeleteLock": { + "methods": [ + "delete_lock" + ] + }, + "DeleteTable": { + "methods": [ + "delete_table" + ] + }, + "GetCatalog": { + "methods": [ + "get_catalog" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetTable": { + "methods": [ + "get_table" + ] + }, + "ListCatalogs": { + "methods": [ + "list_catalogs" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "ListLocks": { + "methods": [ + "list_locks" + ] + }, + "ListTables": { + "methods": [ + "list_tables" + ] + }, + "RenameTable": { + "methods": [ + "rename_table" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateTable": { + "methods": [ + "update_table" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/gapic_version.py new file mode 100644 index 000000000000..c050f9a7e392 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +__version__ = "0.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/py.typed b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/py.typed new file mode 100644 index 000000000000..70e9a3b83398 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-biglake package uses inline types. diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/__init__.py new file mode 100644 index 000000000000..dbacb2f78046 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MetastoreServiceAsyncClient +from .client import MetastoreServiceClient + +__all__ = ( + "MetastoreServiceClient", + "MetastoreServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/async_client.py new file mode 100644 index 000000000000..e4f098cc38ff --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/async_client.py @@ -0,0 +1,2347 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1.services.metastore_service import pagers +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + +from .client import MetastoreServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport +from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport + + +class MetastoreServiceAsyncClient: + """BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. 
+ + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + """ + + _client: MetastoreServiceClient + + DEFAULT_ENDPOINT = MetastoreServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT + + catalog_path = staticmethod(MetastoreServiceClient.catalog_path) + parse_catalog_path = staticmethod(MetastoreServiceClient.parse_catalog_path) + database_path = staticmethod(MetastoreServiceClient.database_path) + parse_database_path = staticmethod(MetastoreServiceClient.parse_database_path) + lock_path = staticmethod(MetastoreServiceClient.lock_path) + parse_lock_path = staticmethod(MetastoreServiceClient.parse_lock_path) + table_path = staticmethod(MetastoreServiceClient.table_path) + parse_table_path = staticmethod(MetastoreServiceClient.parse_table_path) + common_billing_account_path = staticmethod( + MetastoreServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MetastoreServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MetastoreServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + MetastoreServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MetastoreServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MetastoreServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(MetastoreServiceClient.common_project_path) + parse_common_project_path = staticmethod( + MetastoreServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(MetastoreServiceClient.common_location_path) + parse_common_location_path = staticmethod( + MetastoreServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetastoreServiceAsyncClient: The constructed client. + """ + return MetastoreServiceClient.from_service_account_info.__func__(MetastoreServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetastoreServiceAsyncClient: The constructed client. + """ + return MetastoreServiceClient.from_service_account_file.__func__(MetastoreServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return MetastoreServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> MetastoreServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MetastoreServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(MetastoreServiceClient).get_transport_class, type(MetastoreServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, MetastoreServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the metastore service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.MetastoreServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MetastoreServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_catalog( + self, + request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None, + *, + parent: Optional[str] = None, + catalog: Optional[metastore.Catalog] = None, + catalog_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Creates a new catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_create_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateCatalogRequest( + parent="parent_value", + catalog_id="catalog_id_value", + ) + + # Make the request + response = await client.create_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateCatalogRequest, dict]]): + The request object. Request message for the CreateCatalog + method. + parent (:class:`str`): + Required. The parent resource where this catalog will be + created. Format: + projects/{project_id_or_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + catalog (:class:`google.cloud.bigquery.biglake_v1alpha1.types.Catalog`): + Required. The catalog to create. The ``name`` field does + not need to be provided. + + This corresponds to the ``catalog`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + catalog_id (:class:`str`): + Required. The ID to use for the + catalog, which will become the final + component of the catalog's resource + name. + + This corresponds to the ``catalog_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, catalog, catalog_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
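+                # (Mixing a full `request` object with flattened field
+                # arguments is rejected because it would be ambiguous which
+                # value should win.)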
+ ) + + request = metastore.CreateCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if catalog is not None: + request.catalog = catalog + if catalog_id is not None: + request.catalog_id = catalog_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_catalog( + self, + request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Deletes an existing catalog specified by the catalog + ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_delete_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteCatalogRequest, dict]]): + The request object. Request message for the DeleteCatalog + method. + name (:class:`str`): + Required. The name of the catalog to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
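+        # (Each flattened argument, when supplied, overwrites the
+        # corresponding field on the coerced request message.)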
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_catalog( + self, + request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Gets the catalog specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_get_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.get_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.GetCatalogRequest, dict]]): + The request object. Request message for the GetCatalog + method. + name (:class:`str`): + Required. The name of the catalog to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
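+        # (`wrap_method` merges the per-call `retry` and `timeout` arguments
+        # with the defaults declared here, and appends the library's
+        # `x-goog-api-client` version header to the outgoing metadata.)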
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_catalogs( + self, + request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCatalogsAsyncPager: + r"""List all catalogs in a specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_list_catalogs(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListCatalogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_catalogs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsRequest, dict]]): + The request object. Request message for the ListCatalogs + method. + parent (:class:`str`): + Required. The parent, which owns this collection of + catalogs. Format: + projects/{project_id_or_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsAsyncPager: + Response message for the ListCatalogs + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListCatalogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
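+        # `list_catalogs` is a paged RPC: the raw ListCatalogsResponse is
+        # wrapped in a ListCatalogsAsyncPager further down, which follows
+        # `next_page_token` transparently. A minimal sketch of draining all
+        # pages (hypothetical parent value):
+        #
+        #     pager = await client.list_catalogs(parent="projects/my-proj/locations/us-central1")
+        #     async for catalog in pager:
+        #         print(catalog.name)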
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_catalogs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCatalogsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_database( + self, + request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[metastore.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Creates a new database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_create_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateDatabaseRequest, dict]]): + The request object. Request message for the + CreateDatabase method. + parent (:class:`str`): + Required. The parent resource where this database will + be created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (:class:`google.cloud.bigquery.biglake_v1alpha1.types.Database`): + Required. The database to create. The ``name`` field + does not need to be provided. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (:class:`str`): + Required. The ID to use for the + database, which will become the final + component of the database's resource + name. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, database, database_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_database( + self, + request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Deletes an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_delete_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteDatabaseRequest, dict]]): + The request object. Request message for the + DeleteDatabase method. + name (:class:`str`): + Required. The name of the database to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_database( + self, + request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[metastore.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Updates an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_update_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateDatabaseRequest( + ) + + # Make the request + response = await client.update_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.UpdateDatabaseRequest, dict]]): + The request object. Request message for the + UpdateDatabase method. + database (:class:`google.cloud.bigquery.biglake_v1alpha1.types.Database`): + Required. The database to update. + + The database's ``name`` field is used to identify the + database to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are + allowed to update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.UpdateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("database.name", request.database.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_database( + self, + request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Gets the database specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_get_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.GetDatabaseRequest, dict]]): + The request object. Request message for the GetDatabase + method. + name (:class:`str`): + Required. The name of the database to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_database, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_databases( + self, + request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabasesAsyncPager: + r"""List all databases in a specified catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_list_databases(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesRequest, dict]]): + The request object. Request message for the ListDatabases + method. + parent (:class:`str`): + Required. The parent, which owns this collection of + databases. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesAsyncPager: + Response message for the + ListDatabases method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListDatabasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_databases, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatabasesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_table( + self, + request: Optional[Union[metastore.CreateTableRequest, dict]] = None, + *, + parent: Optional[str] = None, + table: Optional[metastore.Table] = None, + table_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Creates a new table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_create_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = await client.create_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateTableRequest, dict]]): + The request object. Request message for the CreateTable + method. + parent (:class:`str`): + Required. The parent resource where this table will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ table (:class:`google.cloud.bigquery.biglake_v1alpha1.types.Table`): + Required. The table to create. The ``name`` field does + not need to be provided for the table creation. + + This corresponds to the ``table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + table_id (:class:`str`): + Required. The ID to use for the + table, which will become the final + component of the table's resource name. + + This corresponds to the ``table_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, table, table_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if table is not None: + request.table = table + if table_id is not None: + request.table_id = table_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_table( + self, + request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Deletes an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_delete_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteTableRequest, dict]]): + The request object. Request message for the DeleteTable + method. + name (:class:`str`): + Required. The name of the table to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_table( + self, + request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, + *, + table: Optional[metastore.Table] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Updates an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_update_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateTableRequest( + ) + + # Make the request + response = await client.update_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.UpdateTableRequest, dict]]): + The request object. Request message for the UpdateTable + method. + table (:class:`google.cloud.bigquery.biglake_v1alpha1.types.Table`): + Required. The table to update. + + The table's ``name`` field is used to identify the table + to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are + allowed to update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([table, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.UpdateTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if table is not None: + request.table = table + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("table.name", request.table.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def rename_table( + self, + request: Optional[Union[metastore.RenameTableRequest, dict]] = None, + *, + name: Optional[str] = None, + new_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Renames an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_rename_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.RenameTableRequest( + name="name_value", + new_name="new_name_value", + ) + + # Make the request + response = await client.rename_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.RenameTableRequest, dict]]): + The request object. Request message for the RenameTable + method in MetastoreService + name (:class:`str`): + Required. The table's ``name`` field is used to identify + the table to rename. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_name (:class:`str`): + Required. The new ``name`` for the specified table, must + be in the same database. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``new_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.RenameTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_name is not None: + request.new_name = new_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
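+ # Illustrative note (not part of the generated source): to_grpc_metadata
+ # yields an ("x-goog-request-params", "name=<url-encoded resource name>")
+ # pair, which the backend uses to route the call to the right resource.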
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_table( + self, + request: Optional[Union[metastore.GetTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Gets the table specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_get_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = await client.get_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.GetTableRequest, dict]]): + The request object. Request message for the GetTable + method. + name (:class:`str`): + Required. The name of the table to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_table, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_tables( + self, + request: Optional[Union[metastore.ListTablesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTablesAsyncPager: + r"""List all tables in a specified database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_list_tables(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.ListTablesRequest, dict]]): + The request object. Request message for the ListTables + method. + parent (:class:`str`): + Required. The parent, which owns this collection of + tables. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListTablesAsyncPager: + Response message for the ListTables + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListTablesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tables, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
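+ # Illustrative usage (not part of the generated source): page tokens are
+ # handled by the pager, so a hypothetical caller can simply write:
+ #     pager = await client.list_tables(parent=parent)
+ #     async for table in pager:
+ #         print(table.name)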
+ response = pagers.ListTablesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_lock( + self, + request: Optional[Union[metastore.CreateLockRequest, dict]] = None, + *, + parent: Optional[str] = None, + lock: Optional[metastore.Lock] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Lock: + r"""Creates a new lock. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_create_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + lock = biglake_v1alpha1.Lock() + lock.table_id = "table_id_value" + + request = biglake_v1alpha1.CreateLockRequest( + parent="parent_value", + lock=lock, + ) + + # Make the request + response = await client.create_lock(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateLockRequest, dict]]): + The request object. Request message for the CreateLock + method. + parent (:class:`str`): + Required. The parent resource where this lock will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lock (:class:`google.cloud.bigquery.biglake_v1alpha1.types.Lock`): + Required. The lock to create. The ``name`` field does + not need to be provided for the lock creation. + + This corresponds to the ``lock`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Lock: + Represents a lock. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lock]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateLockRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lock is not None: + request.lock = lock + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
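+ # Illustrative note (not part of the generated source): wrap_method layers
+ # retry/timeout handling and client-info metadata over the raw transport
+ # callable, so the code below only deals with a single `rpc` coroutine.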
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_lock, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_lock( + self, + request: Optional[Union[metastore.DeleteLockRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing lock specified by the lock ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_delete_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteLockRequest( + name="name_value", + ) + + # Make the request + await client.delete_lock(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteLockRequest, dict]]): + The request object. Request message for the DeleteLock + method. + name (:class:`str`): + Required. The name of the lock to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteLockRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_lock, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
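+ # Illustrative note (not part of the generated source): DeleteLock has no
+ # meaningful response payload, so the result of the call is deliberately
+ # discarded and this method returns None.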
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def check_lock( + self, + request: Optional[Union[metastore.CheckLockRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Lock: + r"""Checks the state of a lock specified by the lock ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_check_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CheckLockRequest( + name="name_value", + ) + + # Make the request + response = await client.check_lock(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.CheckLockRequest, dict]]): + The request object. Request message for the CheckLock + method. + name (:class:`str`): + Required. The name of the lock to check. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Lock: + Represents a lock. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CheckLockRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.check_lock, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
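+ # Illustrative usage (not part of the generated source): a hypothetical
+ # caller might poll check_lock to see whether a previously created lock
+ # is still held before operating on the locked table.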
+ return response + + async def list_locks( + self, + request: Optional[Union[metastore.ListLocksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLocksAsyncPager: + r"""List all locks in a specified database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + async def sample_list_locks(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListLocksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_locks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery.biglake_v1alpha1.types.ListLocksRequest, dict]]): + The request object. Request message for the ListLocks + method. + parent (:class:`str`): + Required. The parent, which owns this collection of + locks. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListLocksAsyncPager: + Response message for the ListLocks + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListLocksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
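+ # Illustrative note (not part of the generated source): the pager below
+ # re-invokes `rpc` with the same request (page token updated) and the
+ # same metadata whenever iteration crosses a page boundary.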
+ response = pagers.ListLocksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MetastoreServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/client.py new file mode 100644 index 000000000000..052a2ff41cb6 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/client.py @@ -0,0 +1,2653 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1.services.metastore_service import pagers +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + +from .transports.base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport +from .transports.grpc import MetastoreServiceGrpcTransport +from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport +from .transports.rest import MetastoreServiceRestTransport + + +class MetastoreServiceClientMeta(type): + """Metaclass for the MetastoreService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MetastoreServiceTransport]] + _transport_registry["grpc"] = MetastoreServiceGrpcTransport + _transport_registry["grpc_asyncio"] = MetastoreServiceGrpcAsyncIOTransport + _transport_registry["rest"] = MetastoreServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MetastoreServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MetastoreServiceClient(metaclass=MetastoreServiceClientMeta): + """BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "biglake.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetastoreServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetastoreServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetastoreServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MetastoreServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def catalog_path( + project: str, + location: str, + catalog: str, + ) -> str: + """Returns a fully-qualified catalog string.""" + return "projects/{project}/locations/{location}/catalogs/{catalog}".format( + project=project, + location=location, + catalog=catalog, + ) + + @staticmethod + def parse_catalog_path(path: str) -> Dict[str, str]: + """Parses a catalog path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def database_path( + project: str, + location: str, + catalog: str, + database: str, + ) -> str: + """Returns a fully-qualified database string.""" + return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format( + project=project, + location=location, + catalog=catalog, + database=database, + ) + + @staticmethod + def parse_database_path(path: str) -> Dict[str, str]: + """Parses a database path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def lock_path( + project: str, + location: str, + catalog: str, + database: str, + lock: str, + ) -> str: + """Returns a fully-qualified lock string.""" + return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/locks/{lock}".format( + project=project, + location=location, + catalog=catalog, + database=database, + lock=lock, + ) + + @staticmethod + def parse_lock_path(path: str) -> Dict[str, str]: + """Parses a lock path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)/locks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def table_path( + project: str, + location: str, + catalog: str, + database: str, + table: str, + ) -> str: + """Returns a fully-qualified table string.""" + return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format( + project=project, + location=location, + catalog=catalog, + database=database, + table=table, + ) + + @staticmethod + def parse_table_path(path: str) -> Dict[str, str]: + """Parses a table path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)/tables/(?P
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetastoreServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metastore service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MetastoreServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetastoreServiceTransport): + # transport is a MetastoreServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_catalog( + self, + request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None, + *, + parent: Optional[str] = None, + catalog: Optional[metastore.Catalog] = None, + catalog_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Creates a new catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_create_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateCatalogRequest( + parent="parent_value", + catalog_id="catalog_id_value", + ) + + # Make the request + response = client.create_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateCatalogRequest, dict]): + The request object. Request message for the CreateCatalog + method. + parent (str): + Required. 
The parent resource where this catalog will be + created. Format: + projects/{project_id_or_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + catalog (google.cloud.bigquery.biglake_v1alpha1.types.Catalog): + Required. The catalog to create. The ``name`` field does + not need to be provided. + + This corresponds to the ``catalog`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + catalog_id (str): + Required. The ID to use for the + catalog, which will become the final + component of the catalog's resource + name. + + This corresponds to the ``catalog_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, catalog, catalog_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateCatalogRequest): + request = metastore.CreateCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if catalog is not None: + request.catalog = catalog + if catalog_id is not None: + request.catalog_id = catalog_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_catalog] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_catalog( + self, + request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Deletes an existing catalog specified by the catalog + ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_delete_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.delete_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteCatalogRequest, dict]): + The request object. Request message for the DeleteCatalog + method. + name (str): + Required. The name of the catalog to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteCatalogRequest): + request = metastore.DeleteCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_catalog] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_catalog( + self, + request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Gets the catalog specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_get_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.get_catalog(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.GetCatalogRequest, dict]): + The request object. Request message for the GetCatalog + method. + name (str): + Required. The name of the catalog to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Catalog: + Catalog is the container of + databases. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetCatalogRequest): + request = metastore.GetCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_catalog] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_catalogs( + self, + request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCatalogsPager: + r"""List all catalogs in a specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_list_catalogs(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListCatalogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_catalogs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsRequest, dict]): + The request object. Request message for the ListCatalogs + method. + parent (str): + Required. The parent, which owns this collection of + catalogs. Format: + projects/{project_id_or_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsPager: + Response message for the ListCatalogs + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListCatalogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListCatalogsRequest): + request = metastore.ListCatalogsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_catalogs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCatalogsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
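+        # Note: iterating the returned pager (``for catalog in response``)
+        # transparently issues further ListCatalogs RPCs as each page is
+        # exhausted; its ``pages`` property yields whole response pages
+        # instead of individual ``Catalog`` items.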
+ return response + + def create_database( + self, + request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[metastore.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Creates a new database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_create_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateDatabaseRequest, dict]): + The request object. Request message for the + CreateDatabase method. + parent (str): + Required. The parent resource where this database will + be created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (google.cloud.bigquery.biglake_v1alpha1.types.Database): + Required. The database to create. The ``name`` field + does not need to be provided. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (str): + Required. The ID to use for the + database, which will become the final + component of the database's resource + name. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, database, database_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, metastore.CreateDatabaseRequest): + request = metastore.CreateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_database( + self, + request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Deletes an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_delete_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.delete_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteDatabaseRequest, dict]): + The request object. Request message for the + DeleteDatabase method. + name (str): + Required. The name of the database to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteDatabaseRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteDatabaseRequest): + request = metastore.DeleteDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_database( + self, + request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[metastore.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Updates an existing database specified by the + database ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_update_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateDatabaseRequest( + ) + + # Make the request + response = client.update_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.UpdateDatabaseRequest, dict]): + The request object. Request message for the + UpdateDatabase method. + database (google.cloud.bigquery.biglake_v1alpha1.types.Database): + Required. The database to update. + + The database's ``name`` field is used to identify the + database to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are + allowed to update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. 
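+
+        Example of the flattened form with an explicit field mask
+        (illustrative; assumes ``hive_options`` is among the updatable
+        fields of ``Database``):
+
+        .. code-block:: python
+
+            from google.protobuf import field_mask_pb2
+
+            update_mask = field_mask_pb2.FieldMask(paths=["hive_options"])
+            updated = client.update_database(
+                database=database, update_mask=update_mask
+            )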
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.UpdateDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.UpdateDatabaseRequest): + request = metastore.UpdateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("database.name", request.database.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_database( + self, + request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Gets the database specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_get_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.GetDatabaseRequest, dict]): + The request object. Request message for the GetDatabase + method. + name (str): + Required. The name of the database to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Database: + Database is the container of tables. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetDatabaseRequest): + request = metastore.GetDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_databases( + self, + request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabasesPager: + r"""List all databases in a specified catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_list_databases(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesRequest, dict]): + The request object. Request message for the ListDatabases + method. + parent (str): + Required. The parent, which owns this collection of + databases. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesPager: + Response message for the + ListDatabases method. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListDatabasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListDatabasesRequest): + request = metastore.ListDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabasesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_table( + self, + request: Optional[Union[metastore.CreateTableRequest, dict]] = None, + *, + parent: Optional[str] = None, + table: Optional[metastore.Table] = None, + table_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Creates a new table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_create_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = client.create_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateTableRequest, dict]): + The request object. Request message for the CreateTable + method. + parent (str): + Required. The parent resource where this table will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ table (google.cloud.bigquery.biglake_v1alpha1.types.Table): + Required. The table to create. The ``name`` field does + not need to be provided for the table creation. + + This corresponds to the ``table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + table_id (str): + Required. The ID to use for the + table, which will become the final + component of the table's resource name. + + This corresponds to the ``table_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, table, table_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateTableRequest): + request = metastore.CreateTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if table is not None: + request.table = table + if table_id is not None: + request.table_id = table_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_table( + self, + request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Deletes an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_delete_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = client.delete_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteTableRequest, dict]): + The request object. Request message for the DeleteTable + method. + name (str): + Required. The name of the table to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteTableRequest): + request = metastore.DeleteTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_table( + self, + request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, + *, + table: Optional[metastore.Table] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Updates an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_update_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateTableRequest( + ) + + # Make the request + response = client.update_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.UpdateTableRequest, dict]): + The request object. Request message for the UpdateTable + method. + table (google.cloud.bigquery.biglake_v1alpha1.types.Table): + Required. The table to update. + + The table's ``name`` field is used to identify the table + to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are + allowed to update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([table, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.UpdateTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.UpdateTableRequest): + request = metastore.UpdateTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if table is not None: + request.table = table + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("table.name", request.table.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
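+        # Note: ``gapic_v1.method.DEFAULT`` for ``retry`` and ``timeout`` means
+        # the policy preconfigured on the wrapped method is applied, not that
+        # retries are disabled; pass explicit values to override it per call.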
+ return response + + def rename_table( + self, + request: Optional[Union[metastore.RenameTableRequest, dict]] = None, + *, + name: Optional[str] = None, + new_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Renames an existing table specified by the table ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_rename_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.RenameTableRequest( + name="name_value", + new_name="new_name_value", + ) + + # Make the request + response = client.rename_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.RenameTableRequest, dict]): + The request object. Request message for the RenameTable + method in MetastoreService + name (str): + Required. The table's ``name`` field is used to identify + the table to rename. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_name (str): + Required. The new ``name`` for the specified table, must + be in the same database. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``new_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.RenameTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.RenameTableRequest): + request = metastore.RenameTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_name is not None: + request.new_name = new_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
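+        # (The wrapped method carries the default retry/timeout policy from the
+        # service configuration; the explicit ``retry``/``timeout`` arguments
+        # accepted by this method override that policy per call.)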
+ rpc = self._transport._wrapped_methods[self._transport.rename_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_table( + self, + request: Optional[Union[metastore.GetTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Gets the table specified by the resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_get_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = client.get_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.GetTableRequest, dict]): + The request object. Request message for the GetTable + method. + name (str): + Required. The name of the table to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Table: + Represents a table. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetTableRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetTableRequest): + request = metastore.GetTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_table] + + # Certain fields should be provided within the metadata header; + # add these here. 
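+ # For context (hedged; the exact encoding is an api-core detail): the
+ # routing header below reaches the backend as gRPC metadata shaped like
+ #   ("x-goog-request-params", "name=projects%2F...%2Ftables%2Fmy-table")
+ # so the service can route the call to the right resource.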
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tables( + self, + request: Optional[Union[metastore.ListTablesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTablesPager: + r"""List all tables in a specified database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_list_tables(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.ListTablesRequest, dict]): + The request object. Request message for the ListTables + method. + parent (str): + Required. The parent, which owns this collection of + tables. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListTablesPager: + Response message for the ListTables + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListTablesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListTablesRequest): + request = metastore.ListTablesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
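+ # For context (a behavioral sketch, not new surface): the wrapped method
+ # carries the defaults registered in _prep_wrapped_messages(), and they
+ # can still be overridden per call, e.g.:
+ #   client.list_tables(parent=parent, timeout=30.0)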
+ rpc = self._transport._wrapped_methods[self._transport.list_tables] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTablesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_lock( + self, + request: Optional[Union[metastore.CreateLockRequest, dict]] = None, + *, + parent: Optional[str] = None, + lock: Optional[metastore.Lock] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Lock: + r"""Creates a new lock. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_create_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + lock = biglake_v1alpha1.Lock() + lock.table_id = "table_id_value" + + request = biglake_v1alpha1.CreateLockRequest( + parent="parent_value", + lock=lock, + ) + + # Make the request + response = client.create_lock(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.CreateLockRequest, dict]): + The request object. Request message for the CreateLock + method. + parent (str): + Required. The parent resource where this lock will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lock (google.cloud.bigquery.biglake_v1alpha1.types.Lock): + Required. The lock to create. The ``name`` field does + not need to be provided for the lock creation. + + This corresponds to the ``lock`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Lock: + Represents a lock. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lock]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateLockRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateLockRequest): + request = metastore.CreateLockRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lock is not None: + request.lock = lock + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_lock] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_lock( + self, + request: Optional[Union[metastore.DeleteLockRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing lock specified by the lock ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_delete_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteLockRequest( + name="name_value", + ) + + # Make the request + client.delete_lock(request=request) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.DeleteLockRequest, dict]): + The request object. Request message for the DeleteLock + method. + name (str): + Required. The name of the lock to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteLockRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
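+ # For reference, the two supported invocation styles are equivalent
+ # (the resource name below is hypothetical):
+ #   client.delete_lock(
+ #       name="projects/p/locations/us/catalogs/c/databases/d/locks/l"
+ #   )
+ #   client.delete_lock(request=metastore.DeleteLockRequest(name=...))
+ # Mixing `request` with flattened fields raises ValueError, per the
+ # check above.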
+ if not isinstance(request, metastore.DeleteLockRequest): + request = metastore.DeleteLockRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_lock] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def check_lock( + self, + request: Optional[Union[metastore.CheckLockRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Lock: + r"""Checks the state of a lock specified by the lock ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_check_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CheckLockRequest( + name="name_value", + ) + + # Make the request + response = client.check_lock(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.CheckLockRequest, dict]): + The request object. Request message for the CheckLock + method. + name (str): + Required. The name of the lock to check. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.types.Lock: + Represents a lock. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CheckLockRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CheckLockRequest): + request = metastore.CheckLockRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check_lock] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locks( + self, + request: Optional[Union[metastore.ListLocksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLocksPager: + r"""List all locks in a specified database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.bigquery import biglake_v1alpha1 + + def sample_list_locks(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListLocksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_locks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery.biglake_v1alpha1.types.ListLocksRequest, dict]): + The request object. Request message for the ListLocks + method. + parent (str): + Required. The parent, which owns this collection of + locks. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListLocksPager: + Response message for the ListLocks + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListLocksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, metastore.ListLocksRequest): + request = metastore.ListLocksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLocksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MetastoreServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MetastoreServiceClient",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/pagers.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/pagers.py new file mode 100644 index 000000000000..190d9f923b91 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/pagers.py @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + + +class ListCatalogsPager: + """A pager for iterating through ``list_catalogs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``catalogs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCatalogs`` requests and continue to iterate + through the ``catalogs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
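+
+ A minimal usage sketch (hedged; assumes a configured
+ ``MetastoreServiceClient`` named ``client`` and a valid ``parent``
+ resource name)::
+
+ for catalog in client.list_catalogs(parent=parent):
+ print(catalog.name)  # yields Catalog messages across all pages
+
+ # Or walk the raw responses page by page:
+ for page in client.list_catalogs(parent=parent).pages:
+ print(page.next_page_token)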
+ """ + + def __init__( + self, + method: Callable[..., metastore.ListCatalogsResponse], + request: metastore.ListCatalogsRequest, + response: metastore.ListCatalogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListCatalogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metastore.ListCatalogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metastore.Catalog]: + for page in self.pages: + yield from page.catalogs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCatalogsAsyncPager: + """A pager for iterating through ``list_catalogs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``catalogs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCatalogs`` requests and continue to iterate + through the ``catalogs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListCatalogsResponse]], + request: metastore.ListCatalogsRequest, + response: metastore.ListCatalogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metastore.ListCatalogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metastore.ListCatalogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[metastore.Catalog]: + async def async_generator(): + async for page in self.pages: + for response in page.catalogs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatabasesPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListDatabasesResponse], + request: metastore.ListDatabasesRequest, + response: metastore.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metastore.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metastore.Database]: + for page in self.pages: + yield from page.databases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatabasesAsyncPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListDatabasesResponse]], + request: metastore.ListDatabasesRequest, + response: metastore.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metastore.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[metastore.Database]: + async def async_generator(): + async for page in self.pages: + for response in page.databases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTablesPager: + """A pager for iterating through ``list_tables`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListTablesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tables`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTables`` requests and continue to iterate + through the ``tables`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListTablesResponse], + request: metastore.ListTablesRequest, + response: metastore.ListTablesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListTablesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListTablesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metastore.ListTablesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metastore.ListTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metastore.Table]: + for page in self.pages: + yield from page.tables + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTablesAsyncPager: + """A pager for iterating through ``list_tables`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListTablesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tables`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTables`` requests and continue to iterate + through the ``tables`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListTablesResponse]], + request: metastore.ListTablesRequest, + response: metastore.ListTablesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListTablesRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListTablesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListTablesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metastore.ListTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[metastore.Table]: + async def async_generator(): + async for page in self.pages: + for response in page.tables: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLocksPager: + """A pager for iterating through ``list_locks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListLocksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``locks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLocks`` requests and continue to iterate + through the ``locks`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListLocksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListLocksResponse], + request: metastore.ListLocksRequest, + response: metastore.ListLocksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListLocksRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListLocksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListLocksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metastore.ListLocksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metastore.Lock]: + for page in self.pages: + yield from page.locks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLocksAsyncPager: + """A pager for iterating through ``list_locks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListLocksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``locks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLocks`` requests and continue to iterate + through the ``locks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery.biglake_v1alpha1.types.ListLocksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListLocksResponse]], + request: metastore.ListLocksRequest, + response: metastore.ListLocksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery.biglake_v1alpha1.types.ListLocksRequest): + The initial request object. + response (google.cloud.bigquery.biglake_v1alpha1.types.ListLocksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metastore.ListLocksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metastore.ListLocksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[metastore.Lock]: + async def async_generator(): + async for page in self.pages: + for response in page.locks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/__init__.py new file mode 100644 index 000000000000..239db2ae2de3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetastoreServiceTransport +from .grpc import MetastoreServiceGrpcTransport +from .grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport +from .rest import MetastoreServiceRestInterceptor, MetastoreServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MetastoreServiceTransport]] +_transport_registry["grpc"] = MetastoreServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MetastoreServiceGrpcAsyncIOTransport +_transport_registry["rest"] = MetastoreServiceRestTransport + +__all__ = ( + "MetastoreServiceTransport", + "MetastoreServiceGrpcTransport", + "MetastoreServiceGrpcAsyncIOTransport", + "MetastoreServiceRestTransport", + "MetastoreServiceRestInterceptor", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/base.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/base.py new file mode 100644 index 000000000000..425dc9456099 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/base.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1 import gapic_version as package_version +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MetastoreServiceTransport(abc.ABC): + """Abstract transport class for MetastoreService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "biglake.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
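+ # Resolution order implemented below (for orientation): explicit
+ # `credentials` win, then `credentials_file`, then Application Default
+ # Credentials via google.auth.default(), e.g. the credentials created
+ # locally by `gcloud auth application-default login`.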
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ # Don't apply the audience if a credentials file was passed by the user.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(
+ api_audience if api_audience else host
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_catalog: gapic_v1.method.wrap_method(
+ self.create_catalog,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_catalog: gapic_v1.method.wrap_method(
+ self.delete_catalog,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_catalog: gapic_v1.method.wrap_method(
+ self.get_catalog,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_catalogs: gapic_v1.method.wrap_method(
+ self.list_catalogs,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.create_database: gapic_v1.method.wrap_method(
+ self.create_database,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_database: gapic_v1.method.wrap_method(
+ self.delete_database,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_database: gapic_v1.method.wrap_method(
+ self.update_database,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_database: gapic_v1.method.wrap_method(
+ self.get_database,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_databases: gapic_v1.method.wrap_method(
+ self.list_databases,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.create_table: gapic_v1.method.wrap_method(
+ self.create_table,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_table: gapic_v1.method.wrap_method(
+ self.delete_table,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_table: gapic_v1.method.wrap_method(
+ self.update_table,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.rename_table: gapic_v1.method.wrap_method(
+ self.rename_table,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_table: gapic_v1.method.wrap_method(
+ self.get_table,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_tables: gapic_v1.method.wrap_method(
+ self.list_tables,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.create_lock: gapic_v1.method.wrap_method(
+ self.create_lock,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_lock: gapic_v1.method.wrap_method(
+ self.delete_lock,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.check_lock: gapic_v1.method.wrap_method(
+ self.check_lock,
+
default_timeout=None, + client_info=client_info, + ), + self.list_locks: gapic_v1.method.wrap_method( + self.list_locks, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_catalog( + self, + ) -> Callable[ + [metastore.CreateCatalogRequest], + Union[metastore.Catalog, Awaitable[metastore.Catalog]], + ]: + raise NotImplementedError() + + @property + def delete_catalog( + self, + ) -> Callable[ + [metastore.DeleteCatalogRequest], + Union[metastore.Catalog, Awaitable[metastore.Catalog]], + ]: + raise NotImplementedError() + + @property + def get_catalog( + self, + ) -> Callable[ + [metastore.GetCatalogRequest], + Union[metastore.Catalog, Awaitable[metastore.Catalog]], + ]: + raise NotImplementedError() + + @property + def list_catalogs( + self, + ) -> Callable[ + [metastore.ListCatalogsRequest], + Union[ + metastore.ListCatalogsResponse, Awaitable[metastore.ListCatalogsResponse] + ], + ]: + raise NotImplementedError() + + @property + def create_database( + self, + ) -> Callable[ + [metastore.CreateDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def delete_database( + self, + ) -> Callable[ + [metastore.DeleteDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def update_database( + self, + ) -> Callable[ + [metastore.UpdateDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def get_database( + self, + ) -> Callable[ + [metastore.GetDatabaseRequest], + Union[metastore.Database, Awaitable[metastore.Database]], + ]: + raise NotImplementedError() + + @property + def list_databases( + self, + ) -> Callable[ + [metastore.ListDatabasesRequest], + Union[ + metastore.ListDatabasesResponse, Awaitable[metastore.ListDatabasesResponse] + ], + ]: + raise NotImplementedError() + + @property + def create_table( + self, + ) -> Callable[ + [metastore.CreateTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def delete_table( + self, + ) -> Callable[ + [metastore.DeleteTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def update_table( + self, + ) -> Callable[ + [metastore.UpdateTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def rename_table( + self, + ) -> Callable[ + [metastore.RenameTableRequest], + Union[metastore.Table, Awaitable[metastore.Table]], + ]: + raise NotImplementedError() + + @property + def get_table( + self, + ) -> Callable[ + [metastore.GetTableRequest], Union[metastore.Table, Awaitable[metastore.Table]] + ]: + raise NotImplementedError() + + @property + def list_tables( + self, + ) -> Callable[ + [metastore.ListTablesRequest], + Union[metastore.ListTablesResponse, Awaitable[metastore.ListTablesResponse]], + ]: + raise NotImplementedError() + + @property + def create_lock( + self, + ) -> Callable[ + [metastore.CreateLockRequest], Union[metastore.Lock, Awaitable[metastore.Lock]] + ]: + raise NotImplementedError() + + @property + def delete_lock( + self, + ) -> Callable[ + 
[metastore.DeleteLockRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def check_lock( + self, + ) -> Callable[ + [metastore.CheckLockRequest], Union[metastore.Lock, Awaitable[metastore.Lock]] + ]: + raise NotImplementedError() + + @property + def list_locks( + self, + ) -> Callable[ + [metastore.ListLocksRequest], + Union[metastore.ListLocksResponse, Awaitable[metastore.ListLocksResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MetastoreServiceTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/grpc.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/grpc.py new file mode 100644 index 000000000000..9cca78a34dfa --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/grpc.py @@ -0,0 +1,729 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + +from .base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport + + +class MetastoreServiceGrpcTransport(MetastoreServiceTransport): + """gRPC backend transport for MetastoreService. + + BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
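+
+ A minimal construction sketch (hedged; most callers let the client
+ build the transport for them)::
+
+ transport = MetastoreServiceGrpcTransport(host="biglake.googleapis.com")
+ client = MetastoreServiceClient(transport=transport)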
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "biglake.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "biglake.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_catalog( + self, + ) -> Callable[[metastore.CreateCatalogRequest], metastore.Catalog]: + r"""Return a callable for the create catalog method over gRPC. + + Creates a new catalog. + + Returns: + Callable[[~.CreateCatalogRequest], + ~.Catalog]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_catalog" not in self._stubs: + self._stubs["create_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateCatalog", + request_serializer=metastore.CreateCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["create_catalog"] + + @property + def delete_catalog( + self, + ) -> Callable[[metastore.DeleteCatalogRequest], metastore.Catalog]: + r"""Return a callable for the delete catalog method over gRPC. + + Deletes an existing catalog specified by the catalog + ID. + + Returns: + Callable[[~.DeleteCatalogRequest], + ~.Catalog]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_catalog" not in self._stubs: + self._stubs["delete_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteCatalog", + request_serializer=metastore.DeleteCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["delete_catalog"] + + @property + def get_catalog(self) -> Callable[[metastore.GetCatalogRequest], metastore.Catalog]: + r"""Return a callable for the get catalog method over gRPC. + + Gets the catalog specified by the resource name. + + Returns: + Callable[[~.GetCatalogRequest], + ~.Catalog]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_catalog" not in self._stubs: + self._stubs["get_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetCatalog", + request_serializer=metastore.GetCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["get_catalog"] + + @property + def list_catalogs( + self, + ) -> Callable[[metastore.ListCatalogsRequest], metastore.ListCatalogsResponse]: + r"""Return a callable for the list catalogs method over gRPC. + + List all catalogs in a specified project. + + Returns: + Callable[[~.ListCatalogsRequest], + ~.ListCatalogsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_catalogs" not in self._stubs: + self._stubs["list_catalogs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListCatalogs", + request_serializer=metastore.ListCatalogsRequest.serialize, + response_deserializer=metastore.ListCatalogsResponse.deserialize, + ) + return self._stubs["list_catalogs"] + + @property + def create_database( + self, + ) -> Callable[[metastore.CreateDatabaseRequest], metastore.Database]: + r"""Return a callable for the create database method over gRPC. + + Creates a new database. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateDatabase", + request_serializer=metastore.CreateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["create_database"] + + @property + def delete_database( + self, + ) -> Callable[[metastore.DeleteDatabaseRequest], metastore.Database]: + r"""Return a callable for the delete database method over gRPC. + + Deletes an existing database specified by the + database ID. + + Returns: + Callable[[~.DeleteDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_database" not in self._stubs: + self._stubs["delete_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteDatabase", + request_serializer=metastore.DeleteDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["delete_database"] + + @property + def update_database( + self, + ) -> Callable[[metastore.UpdateDatabaseRequest], metastore.Database]: + r"""Return a callable for the update database method over gRPC. + + Updates an existing database specified by the + database ID. + + Returns: + Callable[[~.UpdateDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_database" not in self._stubs: + self._stubs["update_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateDatabase", + request_serializer=metastore.UpdateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["update_database"] + + @property + def get_database( + self, + ) -> Callable[[metastore.GetDatabaseRequest], metastore.Database]: + r"""Return a callable for the get database method over gRPC. 
+ + Gets the database specified by the resource name. + + Returns: + Callable[[~.GetDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_database" not in self._stubs: + self._stubs["get_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetDatabase", + request_serializer=metastore.GetDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["get_database"] + + @property + def list_databases( + self, + ) -> Callable[[metastore.ListDatabasesRequest], metastore.ListDatabasesResponse]: + r"""Return a callable for the list databases method over gRPC. + + List all databases in a specified catalog. + + Returns: + Callable[[~.ListDatabasesRequest], + ~.ListDatabasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_databases" not in self._stubs: + self._stubs["list_databases"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListDatabases", + request_serializer=metastore.ListDatabasesRequest.serialize, + response_deserializer=metastore.ListDatabasesResponse.deserialize, + ) + return self._stubs["list_databases"] + + @property + def create_table(self) -> Callable[[metastore.CreateTableRequest], metastore.Table]: + r"""Return a callable for the create table method over gRPC. + + Creates a new table. + + Returns: + Callable[[~.CreateTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_table" not in self._stubs: + self._stubs["create_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateTable", + request_serializer=metastore.CreateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["create_table"] + + @property + def delete_table(self) -> Callable[[metastore.DeleteTableRequest], metastore.Table]: + r"""Return a callable for the delete table method over gRPC. + + Deletes an existing table specified by the table ID. + + Returns: + Callable[[~.DeleteTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
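Every RPC property in this file repeats the same memoization idiom: build the ``unary_unary`` stub once, cache it in ``self._stubs``, and return the cached callable afterwards. Factored out, the pattern reduces to the hypothetical helper below; the generator inlines it instead so each property can carry its own precise ``Callable`` type:

.. code-block:: python

    def cached_stub(stubs, channel, name, path, request_type, response_type):
        """Hypothetical helper showing the idiom each property above inlines."""
        if name not in stubs:
            # gRPC handles (de)serialization; only the two functions are supplied.
            stubs[name] = channel.unary_unary(
                path,
                request_serializer=request_type.serialize,
                response_deserializer=response_type.deserialize,
            )
        return stubs[name]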
+ if "delete_table" not in self._stubs: + self._stubs["delete_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteTable", + request_serializer=metastore.DeleteTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["delete_table"] + + @property + def update_table(self) -> Callable[[metastore.UpdateTableRequest], metastore.Table]: + r"""Return a callable for the update table method over gRPC. + + Updates an existing table specified by the table ID. + + Returns: + Callable[[~.UpdateTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_table" not in self._stubs: + self._stubs["update_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateTable", + request_serializer=metastore.UpdateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["update_table"] + + @property + def rename_table(self) -> Callable[[metastore.RenameTableRequest], metastore.Table]: + r"""Return a callable for the rename table method over gRPC. + + Renames an existing table specified by the table ID. + + Returns: + Callable[[~.RenameTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_table" not in self._stubs: + self._stubs["rename_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/RenameTable", + request_serializer=metastore.RenameTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["rename_table"] + + @property + def get_table(self) -> Callable[[metastore.GetTableRequest], metastore.Table]: + r"""Return a callable for the get table method over gRPC. + + Gets the table specified by the resource name. + + Returns: + Callable[[~.GetTableRequest], + ~.Table]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_table" not in self._stubs: + self._stubs["get_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetTable", + request_serializer=metastore.GetTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["get_table"] + + @property + def list_tables( + self, + ) -> Callable[[metastore.ListTablesRequest], metastore.ListTablesResponse]: + r"""Return a callable for the list tables method over gRPC. + + List all tables in a specified database. + + Returns: + Callable[[~.ListTablesRequest], + ~.ListTablesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_tables" not in self._stubs: + self._stubs["list_tables"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListTables", + request_serializer=metastore.ListTablesRequest.serialize, + response_deserializer=metastore.ListTablesResponse.deserialize, + ) + return self._stubs["list_tables"] + + @property + def create_lock(self) -> Callable[[metastore.CreateLockRequest], metastore.Lock]: + r"""Return a callable for the create lock method over gRPC. + + Creates a new lock. + + Returns: + Callable[[~.CreateLockRequest], + ~.Lock]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_lock" not in self._stubs: + self._stubs["create_lock"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateLock", + request_serializer=metastore.CreateLockRequest.serialize, + response_deserializer=metastore.Lock.deserialize, + ) + return self._stubs["create_lock"] + + @property + def delete_lock(self) -> Callable[[metastore.DeleteLockRequest], empty_pb2.Empty]: + r"""Return a callable for the delete lock method over gRPC. + + Deletes an existing lock specified by the lock ID. + + Returns: + Callable[[~.DeleteLockRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_lock" not in self._stubs: + self._stubs["delete_lock"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteLock", + request_serializer=metastore.DeleteLockRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_lock"] + + @property + def check_lock(self) -> Callable[[metastore.CheckLockRequest], metastore.Lock]: + r"""Return a callable for the check lock method over gRPC. + + Checks the state of a lock specified by the lock ID. + + Returns: + Callable[[~.CheckLockRequest], + ~.Lock]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_lock" not in self._stubs: + self._stubs["check_lock"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CheckLock", + request_serializer=metastore.CheckLockRequest.serialize, + response_deserializer=metastore.Lock.deserialize, + ) + return self._stubs["check_lock"] + + @property + def list_locks( + self, + ) -> Callable[[metastore.ListLocksRequest], metastore.ListLocksResponse]: + r"""Return a callable for the list locks method over gRPC. + + List all locks in a specified database. + + Returns: + Callable[[~.ListLocksRequest], + ~.ListLocksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locks" not in self._stubs: + self._stubs["list_locks"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListLocks", + request_serializer=metastore.ListLocksRequest.serialize, + response_deserializer=metastore.ListLocksResponse.deserialize, + ) + return self._stubs["list_locks"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MetastoreServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2547311936ca --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/grpc_asyncio.py @@ -0,0 +1,752 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + +from .base import DEFAULT_CLIENT_INFO, MetastoreServiceTransport +from .grpc import MetastoreServiceGrpcTransport + + +class MetastoreServiceGrpcAsyncIOTransport(MetastoreServiceTransport): + """gRPC AsyncIO backend transport for MetastoreService. + + BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "biglake.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "biglake.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_catalog( + self, + ) -> Callable[[metastore.CreateCatalogRequest], Awaitable[metastore.Catalog]]: + r"""Return a callable for the create catalog method over gRPC. + + Creates a new catalog. + + Returns: + Callable[[~.CreateCatalogRequest], + Awaitable[~.Catalog]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_catalog" not in self._stubs: + self._stubs["create_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateCatalog", + request_serializer=metastore.CreateCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["create_catalog"] + + @property + def delete_catalog( + self, + ) -> Callable[[metastore.DeleteCatalogRequest], Awaitable[metastore.Catalog]]: + r"""Return a callable for the delete catalog method over gRPC. + + Deletes an existing catalog specified by the catalog + ID. + + Returns: + Callable[[~.DeleteCatalogRequest], + Awaitable[~.Catalog]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_catalog" not in self._stubs: + self._stubs["delete_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteCatalog", + request_serializer=metastore.DeleteCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["delete_catalog"] + + @property + def get_catalog( + self, + ) -> Callable[[metastore.GetCatalogRequest], Awaitable[metastore.Catalog]]: + r"""Return a callable for the get catalog method over gRPC. + + Gets the catalog specified by the resource name. + + Returns: + Callable[[~.GetCatalogRequest], + Awaitable[~.Catalog]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_catalog" not in self._stubs: + self._stubs["get_catalog"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetCatalog", + request_serializer=metastore.GetCatalogRequest.serialize, + response_deserializer=metastore.Catalog.deserialize, + ) + return self._stubs["get_catalog"] + + @property + def list_catalogs( + self, + ) -> Callable[ + [metastore.ListCatalogsRequest], Awaitable[metastore.ListCatalogsResponse] + ]: + r"""Return a callable for the list catalogs method over gRPC. + + List all catalogs in a specified project. + + Returns: + Callable[[~.ListCatalogsRequest], + Awaitable[~.ListCatalogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
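Each stub is registered under gRPC's canonical method path, ``/<proto package>.<Service>/<Method>``, which is why the same fully-qualified prefix recurs in every property. For reference, as an illustrative helper:

.. code-block:: python

    SERVICE = "google.cloud.bigquery.biglake.v1alpha1.MetastoreService"


    def method_path(method: str) -> str:
        """Build the canonical gRPC method path for this service."""
        return f"/{SERVICE}/{method}"


    assert method_path("CreateCatalog") == (
        "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateCatalog"
    )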
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_catalogs" not in self._stubs: + self._stubs["list_catalogs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListCatalogs", + request_serializer=metastore.ListCatalogsRequest.serialize, + response_deserializer=metastore.ListCatalogsResponse.deserialize, + ) + return self._stubs["list_catalogs"] + + @property + def create_database( + self, + ) -> Callable[[metastore.CreateDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the create database method over gRPC. + + Creates a new database. + + Returns: + Callable[[~.CreateDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateDatabase", + request_serializer=metastore.CreateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["create_database"] + + @property + def delete_database( + self, + ) -> Callable[[metastore.DeleteDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the delete database method over gRPC. + + Deletes an existing database specified by the + database ID. + + Returns: + Callable[[~.DeleteDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_database" not in self._stubs: + self._stubs["delete_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteDatabase", + request_serializer=metastore.DeleteDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["delete_database"] + + @property + def update_database( + self, + ) -> Callable[[metastore.UpdateDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the update database method over gRPC. + + Updates an existing database specified by the + database ID. + + Returns: + Callable[[~.UpdateDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_database" not in self._stubs: + self._stubs["update_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateDatabase", + request_serializer=metastore.UpdateDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["update_database"] + + @property + def get_database( + self, + ) -> Callable[[metastore.GetDatabaseRequest], Awaitable[metastore.Database]]: + r"""Return a callable for the get database method over gRPC. + + Gets the database specified by the resource name. + + Returns: + Callable[[~.GetDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_database" not in self._stubs: + self._stubs["get_database"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetDatabase", + request_serializer=metastore.GetDatabaseRequest.serialize, + response_deserializer=metastore.Database.deserialize, + ) + return self._stubs["get_database"] + + @property + def list_databases( + self, + ) -> Callable[ + [metastore.ListDatabasesRequest], Awaitable[metastore.ListDatabasesResponse] + ]: + r"""Return a callable for the list databases method over gRPC. + + List all databases in a specified catalog. + + Returns: + Callable[[~.ListDatabasesRequest], + Awaitable[~.ListDatabasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_databases" not in self._stubs: + self._stubs["list_databases"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListDatabases", + request_serializer=metastore.ListDatabasesRequest.serialize, + response_deserializer=metastore.ListDatabasesResponse.deserialize, + ) + return self._stubs["list_databases"] + + @property + def create_table( + self, + ) -> Callable[[metastore.CreateTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the create table method over gRPC. + + Creates a new table. + + Returns: + Callable[[~.CreateTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_table" not in self._stubs: + self._stubs["create_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateTable", + request_serializer=metastore.CreateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["create_table"] + + @property + def delete_table( + self, + ) -> Callable[[metastore.DeleteTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the delete table method over gRPC. + + Deletes an existing table specified by the table ID. + + Returns: + Callable[[~.DeleteTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_table" not in self._stubs: + self._stubs["delete_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteTable", + request_serializer=metastore.DeleteTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["delete_table"] + + @property + def update_table( + self, + ) -> Callable[[metastore.UpdateTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the update table method over gRPC. + + Updates an existing table specified by the table ID. + + Returns: + Callable[[~.UpdateTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_table" not in self._stubs: + self._stubs["update_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateTable", + request_serializer=metastore.UpdateTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["update_table"] + + @property + def rename_table( + self, + ) -> Callable[[metastore.RenameTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the rename table method over gRPC. + + Renames an existing table specified by the table ID. + + Returns: + Callable[[~.RenameTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_table" not in self._stubs: + self._stubs["rename_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/RenameTable", + request_serializer=metastore.RenameTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["rename_table"] + + @property + def get_table( + self, + ) -> Callable[[metastore.GetTableRequest], Awaitable[metastore.Table]]: + r"""Return a callable for the get table method over gRPC. + + Gets the table specified by the resource name. + + Returns: + Callable[[~.GetTableRequest], + Awaitable[~.Table]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_table" not in self._stubs: + self._stubs["get_table"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetTable", + request_serializer=metastore.GetTableRequest.serialize, + response_deserializer=metastore.Table.deserialize, + ) + return self._stubs["get_table"] + + @property + def list_tables( + self, + ) -> Callable[ + [metastore.ListTablesRequest], Awaitable[metastore.ListTablesResponse] + ]: + r"""Return a callable for the list tables method over gRPC. + + List all tables in a specified database. 
+ + Returns: + Callable[[~.ListTablesRequest], + Awaitable[~.ListTablesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tables" not in self._stubs: + self._stubs["list_tables"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListTables", + request_serializer=metastore.ListTablesRequest.serialize, + response_deserializer=metastore.ListTablesResponse.deserialize, + ) + return self._stubs["list_tables"] + + @property + def create_lock( + self, + ) -> Callable[[metastore.CreateLockRequest], Awaitable[metastore.Lock]]: + r"""Return a callable for the create lock method over gRPC. + + Creates a new lock. + + Returns: + Callable[[~.CreateLockRequest], + Awaitable[~.Lock]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_lock" not in self._stubs: + self._stubs["create_lock"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateLock", + request_serializer=metastore.CreateLockRequest.serialize, + response_deserializer=metastore.Lock.deserialize, + ) + return self._stubs["create_lock"] + + @property + def delete_lock( + self, + ) -> Callable[[metastore.DeleteLockRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete lock method over gRPC. + + Deletes an existing lock specified by the lock ID. + + Returns: + Callable[[~.DeleteLockRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_lock" not in self._stubs: + self._stubs["delete_lock"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteLock", + request_serializer=metastore.DeleteLockRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_lock"] + + @property + def check_lock( + self, + ) -> Callable[[metastore.CheckLockRequest], Awaitable[metastore.Lock]]: + r"""Return a callable for the check lock method over gRPC. + + Checks the state of a lock specified by the lock ID. + + Returns: + Callable[[~.CheckLockRequest], + Awaitable[~.Lock]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "check_lock" not in self._stubs: + self._stubs["check_lock"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CheckLock", + request_serializer=metastore.CheckLockRequest.serialize, + response_deserializer=metastore.Lock.deserialize, + ) + return self._stubs["check_lock"] + + @property + def list_locks( + self, + ) -> Callable[[metastore.ListLocksRequest], Awaitable[metastore.ListLocksResponse]]: + r"""Return a callable for the list locks method over gRPC. + + List all locks in a specified database. + + Returns: + Callable[[~.ListLocksRequest], + Awaitable[~.ListLocksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locks" not in self._stubs: + self._stubs["list_locks"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListLocks", + request_serializer=metastore.ListLocksRequest.serialize, + response_deserializer=metastore.ListLocksResponse.deserialize, + ) + return self._stubs["list_locks"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MetastoreServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/rest.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/rest.py new file mode 100644 index 000000000000..2fc21d5cc11a --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/services/metastore_service/transports/rest.py @@ -0,0 +1,2578 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MetastoreServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MetastoreServiceRestInterceptor: + """Interceptor for MetastoreService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MetastoreServiceRestTransport. + + .. 
code-block:: python + class MyCustomMetastoreServiceInterceptor(MetastoreServiceRestInterceptor): + def pre_check_lock(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_lock(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_catalog(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_catalog(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_lock(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_lock(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_catalog(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_catalog(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_lock(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_catalog(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_catalog(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_catalogs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_catalogs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_databases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_locks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_locks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tables(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tables(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rename_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rename_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_table(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MetastoreServiceRestTransport(interceptor=MyCustomMetastoreServiceInterceptor()) + client = MetastoreServiceClient(transport=transport) + + + """ + + def pre_check_lock( + self, request: metastore.CheckLockRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.CheckLockRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check_lock + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_check_lock(self, response: metastore.Lock) -> metastore.Lock: + """Post-rpc interceptor for check_lock + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_create_catalog( + self, + request: metastore.CreateCatalogRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.CreateCatalogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_catalog + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_create_catalog(self, response: metastore.Catalog) -> metastore.Catalog: + """Post-rpc interceptor for create_catalog + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_create_database( + self, + request: metastore.CreateDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.CreateDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_create_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for create_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_create_lock( + self, request: metastore.CreateLockRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.CreateLockRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_lock + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. 
+ """ + return request, metadata + + def post_create_lock(self, response: metastore.Lock) -> metastore.Lock: + """Post-rpc interceptor for create_lock + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_create_table( + self, request: metastore.CreateTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.CreateTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_create_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for create_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_catalog( + self, + request: metastore.DeleteCatalogRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.DeleteCatalogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_catalog + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_delete_catalog(self, response: metastore.Catalog) -> metastore.Catalog: + """Post-rpc interceptor for delete_catalog + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_database( + self, + request: metastore.DeleteDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.DeleteDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_delete_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for delete_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_lock( + self, request: metastore.DeleteLockRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.DeleteLockRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_lock + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def pre_delete_table( + self, request: metastore.DeleteTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.DeleteTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_delete_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for delete_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_catalog( + self, request: metastore.GetCatalogRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.GetCatalogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_catalog + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_get_catalog(self, response: metastore.Catalog) -> metastore.Catalog: + """Post-rpc interceptor for get_catalog + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_get_database( + self, request: metastore.GetDatabaseRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.GetDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_get_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for get_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_get_table( + self, request: metastore.GetTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.GetTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_get_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for get_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_list_catalogs( + self, + request: metastore.ListCatalogsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.ListCatalogsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_catalogs + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_list_catalogs( + self, response: metastore.ListCatalogsResponse + ) -> metastore.ListCatalogsResponse: + """Post-rpc interceptor for list_catalogs + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_list_databases( + self, + request: metastore.ListDatabasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.ListDatabasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_list_databases( + self, response: metastore.ListDatabasesResponse + ) -> metastore.ListDatabasesResponse: + """Post-rpc interceptor for list_databases + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locks( + self, request: metastore.ListLocksRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.ListLocksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locks + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_list_locks( + self, response: metastore.ListLocksResponse + ) -> metastore.ListLocksResponse: + """Post-rpc interceptor for list_locks + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_list_tables( + self, request: metastore.ListTablesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.ListTablesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tables + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_list_tables( + self, response: metastore.ListTablesResponse + ) -> metastore.ListTablesResponse: + """Post-rpc interceptor for list_tables + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_rename_table( + self, request: metastore.RenameTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.RenameTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rename_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_rename_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for rename_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_update_database( + self, + request: metastore.UpdateDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metastore.UpdateDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_update_database(self, response: metastore.Database) -> metastore.Database: + """Post-rpc interceptor for update_database + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. + """ + return response + + def pre_update_table( + self, request: metastore.UpdateTableRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metastore.UpdateTableRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetastoreService server. + """ + return request, metadata + + def post_update_table(self, response: metastore.Table) -> metastore.Table: + """Post-rpc interceptor for update_table + + Override in a subclass to manipulate the response + after it is returned by the MetastoreService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class MetastoreServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MetastoreServiceRestInterceptor + + +class MetastoreServiceRestTransport(MetastoreServiceTransport): + """REST backend transport for MetastoreService. + + BigLake Metastore is a serverless, highly available, multi-tenant + runtime metastore for Google Cloud Data Analytics products. + + The BigLake Metastore API defines the following resource model: + + - A collection of Google Cloud projects: ``/projects/*`` + - Each project has a collection of available locations: + ``/locations/*`` + - Each location has a collection of catalogs: ``/catalogs/*`` + - Each catalog has a collection of databases: ``/databases/*`` + - Each database has a collection of tables: ``/tables/*`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "biglake.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MetastoreServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MetastoreServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CheckLock(MetastoreServiceRestStub): + def __hash__(self): + return hash("CheckLock") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.CheckLockRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Lock: + r"""Call the check lock method over HTTP. + + Args: + request (~.metastore.CheckLockRequest): + The request object. Request message for the CheckLock + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Lock: + Represents a lock. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}:check", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_check_lock(request, metadata) + pb_request = metastore.CheckLockRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
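A minimal sketch of the interceptor mechanism documented above. The service-package import path and exported names below do not appear in this diff; they are assumed from the standard GAPIC module layout. The subclass appends a metadata header before create_table calls and passes responses through unchanged.

# A minimal sketch, assuming the standard GAPIC layout for this package;
# the import path below is an assumption, not taken from this diff.
from google.cloud.bigquery.biglake_v1alpha1.services.metastore_service import (
    MetastoreServiceClient,
    transports,
)

class HeaderInjectingInterceptor(transports.MetastoreServiceRestInterceptor):
    def pre_create_table(self, request, metadata):
        # metadata is a sequence of (key, value) tuples; append a custom header.
        return request, tuple(metadata) + (("x-example-header", "demo"),)

    def post_create_table(self, response):
        # Inspect (or replace) the response before user code receives it.
        return response

# Credentials are resolved from the environment (ADC) when none are passed.
transport = transports.MetastoreServiceRestTransport(
    interceptor=HeaderInjectingInterceptor()
)
client = MetastoreServiceClient(transport=transport)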
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Lock() + pb_resp = metastore.Lock.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_lock(resp) + return resp + + class _CreateCatalog(MetastoreServiceRestStub): + def __hash__(self): + return hash("CreateCatalog") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "catalogId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.CreateCatalogRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Call the create catalog method over HTTP. + + Args: + request (~.metastore.CreateCatalogRequest): + The request object. Request message for the CreateCatalog + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Catalog: + Catalog is the container of + databases. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/catalogs", + "body": "catalog", + }, + ] + request, metadata = self._interceptor.pre_create_catalog(request, metadata) + pb_request = metastore.CreateCatalogRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
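The constructor's host handling can be restated as a small standalone sketch: a bare hostname picks up ``url_scheme``, while a host given with an explicit scheme is kept as-is.

# Standalone restatement of the host normalization in __init__ above.
import re

def normalize_host(host: str, url_scheme: str = "https") -> str:
    m = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
    if m is None:  # unreachable in practice, mirroring the original's NO COVER
        raise ValueError(f"Unexpected hostname structure: {host}")
    return host if m.groupdict()["scheme"] else f"{url_scheme}://{host}"

assert normalize_host("biglake.googleapis.com") == "https://biglake.googleapis.com"
assert normalize_host("http://localhost:8080") == "http://localhost:8080"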
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Catalog() + pb_resp = metastore.Catalog.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_catalog(resp) + return resp + + class _CreateDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("CreateDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "databaseId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.CreateDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the create database method over HTTP. + + Args: + request (~.metastore.CreateDatabaseRequest): + The request object. Request message for the + CreateDatabase method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases", + "body": "database", + }, + ] + request, metadata = self._interceptor.pre_create_database(request, metadata) + pb_request = metastore.CreateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_database(resp) + return resp + + class _CreateLock(MetastoreServiceRestStub): + def __hash__(self): + return hash("CreateLock") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.CreateLockRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Lock: + r"""Call the create lock method over HTTP. + + Args: + request (~.metastore.CreateLockRequest): + The request object. Request message for the CreateLock + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Lock: + Represents a lock. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks", + "body": "lock", + }, + ] + request, metadata = self._interceptor.pre_create_lock(request, metadata) + pb_request = metastore.CreateLockRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Lock() + pb_resp = metastore.Lock.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_lock(resp) + return resp + + class _CreateTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("CreateTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "tableId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.CreateTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the create table method over HTTP. + + Args: + request (~.metastore.CreateTableRequest): + The request object. Request message for the CreateTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables", + "body": "table", + }, + ] + request, metadata = self._interceptor.pre_create_table(request, metadata) + pb_request = metastore.CreateTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
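The create stubs above surface through the client as ordinary method calls. A hedged usage sketch follows: the resource names are placeholders, and the ``transport="rest"`` shorthand and flattened request fields are inferred from GAPIC conventions and the stubs above rather than shown in this diff.

# Hypothetical usage sketch: resource names are placeholders, and the
# request fields are inferred from the stubs above.
from google.cloud.bigquery.biglake_v1alpha1.services.metastore_service import (
    MetastoreServiceClient,
)
from google.cloud.bigquery.biglake_v1alpha1.types import metastore

client = MetastoreServiceClient(transport="rest")  # transport shorthand assumed
catalog = client.create_catalog(
    request=metastore.CreateCatalogRequest(
        parent="projects/my-project/locations/us",
        catalog_id="my_catalog",
        catalog=metastore.Catalog(),
    )
)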
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_table(resp) + return resp + + class _DeleteCatalog(MetastoreServiceRestStub): + def __hash__(self): + return hash("DeleteCatalog") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.DeleteCatalogRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Call the delete catalog method over HTTP. + + Args: + request (~.metastore.DeleteCatalogRequest): + The request object. Request message for the DeleteCatalog + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Catalog: + Catalog is the container of + databases. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_catalog(request, metadata) + pb_request = metastore.DeleteCatalogRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Catalog() + pb_resp = metastore.Catalog.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_catalog(resp) + return resp + + class _DeleteDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("DeleteDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.DeleteDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the delete database method over HTTP. 
+ + Args: + request (~.metastore.DeleteDatabaseRequest): + The request object. Request message for the + DeleteDatabase method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_database(request, metadata) + pb_request = metastore.DeleteDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_database(resp) + return resp + + class _DeleteLock(MetastoreServiceRestStub): + def __hash__(self): + return hash("DeleteLock") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.DeleteLockRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete lock method over HTTP. + + Args: + request (~.metastore.DeleteLockRequest): + The request object. Request message for the DeleteLock + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_lock(request, metadata) + pb_request = metastore.DeleteLockRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("DeleteTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.DeleteTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the delete table method over HTTP. + + Args: + request (~.metastore.DeleteTableRequest): + The request object. Request message for the DeleteTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_table(request, metadata) + pb_request = metastore.DeleteTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_table(resp) + return resp + + class _GetCatalog(MetastoreServiceRestStub): + def __hash__(self): + return hash("GetCatalog") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.GetCatalogRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Catalog: + r"""Call the get catalog method over HTTP. + + Args: + request (~.metastore.GetCatalogRequest): + The request object. Request message for the GetCatalog + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Catalog: + Catalog is the container of + databases. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_catalog(request, metadata) + pb_request = metastore.GetCatalogRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Catalog() + pb_resp = metastore.Catalog.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_catalog(resp) + return resp + + class _GetDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("GetDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.GetDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the get database method over HTTP. + + Args: + request (~.metastore.GetDatabaseRequest): + The request object. 
Request message for the GetDatabase + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}", + }, + ] + request, metadata = self._interceptor.pre_get_database(request, metadata) + pb_request = metastore.GetDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_database(resp) + return resp + + class _GetTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("GetTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.GetTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the get table method over HTTP. + + Args: + request (~.metastore.GetTableRequest): + The request object. Request message for the GetTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + }, + ] + request, metadata = self._interceptor.pre_get_table(request, metadata) + pb_request = metastore.GetTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_table(resp) + return resp + + class _ListCatalogs(MetastoreServiceRestStub): + def __hash__(self): + return hash("ListCatalogs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.ListCatalogsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.ListCatalogsResponse: + r"""Call the list catalogs method over HTTP. + + Args: + request (~.metastore.ListCatalogsRequest): + The request object. Request message for the ListCatalogs + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.ListCatalogsResponse: + Response message for the ListCatalogs + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/catalogs", + }, + ] + request, metadata = self._interceptor.pre_list_catalogs(request, metadata) + pb_request = metastore.ListCatalogsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.ListCatalogsResponse() + pb_resp = metastore.ListCatalogsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_catalogs(resp) + return resp + + class _ListDatabases(MetastoreServiceRestStub): + def __hash__(self): + return hash("ListDatabases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.ListDatabasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.ListDatabasesResponse: + r"""Call the list databases method over HTTP. + + Args: + request (~.metastore.ListDatabasesRequest): + The request object. Request message for the ListDatabases + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.ListDatabasesResponse: + Response message for the + ListDatabases method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases", + }, + ] + request, metadata = self._interceptor.pre_list_databases(request, metadata) + pb_request = metastore.ListDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.ListDatabasesResponse() + pb_resp = metastore.ListDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_databases(resp) + return resp + + class _ListLocks(MetastoreServiceRestStub): + def __hash__(self): + return hash("ListLocks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.ListLocksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.ListLocksResponse: + r"""Call the list locks method over HTTP. + + Args: + request (~.metastore.ListLocksRequest): + The request object. Request message for the ListLocks + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.ListLocksResponse: + Response message for the ListLocks + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks", + }, + ] + request, metadata = self._interceptor.pre_list_locks(request, metadata) + pb_request = metastore.ListLocksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.ListLocksResponse() + pb_resp = metastore.ListLocksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_locks(resp) + return resp + + class _ListTables(MetastoreServiceRestStub): + def __hash__(self): + return hash("ListTables") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.ListTablesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.ListTablesResponse: + r"""Call the list tables method over HTTP. + + Args: + request (~.metastore.ListTablesRequest): + The request object. Request message for the ListTables + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.ListTablesResponse: + Response message for the ListTables + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables", + }, + ] + request, metadata = self._interceptor.pre_list_tables(request, metadata) + pb_request = metastore.ListTablesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.ListTablesResponse() + pb_resp = metastore.ListTablesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tables(resp) + return resp + + class _RenameTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("RenameTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.RenameTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the rename table method over HTTP. + + Args: + request (~.metastore.RenameTableRequest): + The request object. Request message for the RenameTable + method in MetastoreService + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. 
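+
+            For illustration: with a hypothetical ``name`` of
+            ``projects/p/locations/l/catalogs/c/databases/d/tables/t``,
+            the HTTP options below transcode this call to::
+
+                POST {host}/v1alpha1/projects/p/locations/l/catalogs/c/databases/d/tables/t:rename
+
+            and, since the binding declares ``"body": "*"``, the whole
+            request message is serialized as the JSON body.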
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_rename_table(request, metadata) + pb_request = metastore.RenameTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rename_table(resp) + return resp + + class _UpdateDatabase(MetastoreServiceRestStub): + def __hash__(self): + return hash("UpdateDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.UpdateDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Database: + r"""Call the update database method over HTTP. + + Args: + request (~.metastore.UpdateDatabaseRequest): + The request object. Request message for the + UpdateDatabase method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Database: + Database is the container of tables. 
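+
+            For illustration: the binding below is a ``PATCH`` whose URI
+            is derived from ``database.name`` and whose JSON body is only
+            the ``database`` field; other request fields such as
+            ``update_mask`` are left to the query string.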
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{database.name=projects/*/locations/*/catalogs/*/databases/*}", + "body": "database", + }, + ] + request, metadata = self._interceptor.pre_update_database(request, metadata) + pb_request = metastore.UpdateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Database() + pb_resp = metastore.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_database(resp) + return resp + + class _UpdateTable(MetastoreServiceRestStub): + def __hash__(self): + return hash("UpdateTable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: metastore.UpdateTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Table: + r"""Call the update table method over HTTP. + + Args: + request (~.metastore.UpdateTableRequest): + The request object. Request message for the UpdateTable + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metastore.Table: + Represents a table. 
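+
+            A minimal request sketch (hypothetical values, including the
+            field-mask path)::
+
+                request = metastore.UpdateTableRequest(
+                    table=metastore.Table(
+                        name="projects/p/locations/l/catalogs/c/databases/d/tables/t",
+                        etag="abc123",
+                    ),
+                    update_mask=field_mask_pb2.FieldMask(
+                        paths=["hive_options.parameters"],
+                    ),
+                )
+
+            The ``etag``, if set, lets the server reject the update when
+            the table has changed since it was read.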
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + "body": "table", + }, + ] + request, metadata = self._interceptor.pre_update_table(request, metadata) + pb_request = metastore.UpdateTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metastore.Table() + pb_resp = metastore.Table.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_table(resp) + return resp + + @property + def check_lock(self) -> Callable[[metastore.CheckLockRequest], metastore.Lock]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckLock(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_catalog( + self, + ) -> Callable[[metastore.CreateCatalogRequest], metastore.Catalog]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCatalog(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_database( + self, + ) -> Callable[[metastore.CreateDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_lock(self) -> Callable[[metastore.CreateLockRequest], metastore.Lock]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateLock(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_table(self) -> Callable[[metastore.CreateTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_catalog( + self, + ) -> Callable[[metastore.DeleteCatalogRequest], metastore.Catalog]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCatalog(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_database( + self, + ) -> Callable[[metastore.DeleteDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_lock(self) -> Callable[[metastore.DeleteLockRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteLock(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_table(self) -> Callable[[metastore.DeleteTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_catalog(self) -> Callable[[metastore.GetCatalogRequest], metastore.Catalog]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCatalog(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_database( + self, + ) -> Callable[[metastore.GetDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_table(self) -> Callable[[metastore.GetTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_catalogs( + self, + ) -> Callable[[metastore.ListCatalogsRequest], metastore.ListCatalogsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCatalogs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_databases( + self, + ) -> Callable[[metastore.ListDatabasesRequest], metastore.ListDatabasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_locks( + self, + ) -> Callable[[metastore.ListLocksRequest], metastore.ListLocksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListLocks(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tables( + self, + ) -> Callable[[metastore.ListTablesRequest], metastore.ListTablesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTables(self._session, self._host, self._interceptor) # type: ignore + + @property + def rename_table(self) -> Callable[[metastore.RenameTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RenameTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_database( + self, + ) -> Callable[[metastore.UpdateDatabaseRequest], metastore.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_table(self) -> Callable[[metastore.UpdateTableRequest], metastore.Table]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MetastoreServiceRestTransport",) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/types/__init__.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/types/__init__.py new file mode 100644 index 000000000000..425394b73e89 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/types/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .metastore import ( + Catalog, + CheckLockRequest, + CreateCatalogRequest, + CreateDatabaseRequest, + CreateLockRequest, + CreateTableRequest, + Database, + DeleteCatalogRequest, + DeleteDatabaseRequest, + DeleteLockRequest, + DeleteTableRequest, + GetCatalogRequest, + GetDatabaseRequest, + GetTableRequest, + HiveDatabaseOptions, + HiveTableOptions, + ListCatalogsRequest, + ListCatalogsResponse, + ListDatabasesRequest, + ListDatabasesResponse, + ListLocksRequest, + ListLocksResponse, + ListTablesRequest, + ListTablesResponse, + Lock, + RenameTableRequest, + Table, + TableView, + UpdateDatabaseRequest, + UpdateTableRequest, +) + +__all__ = ( + "Catalog", + "CheckLockRequest", + "CreateCatalogRequest", + "CreateDatabaseRequest", + "CreateLockRequest", + "CreateTableRequest", + "Database", + "DeleteCatalogRequest", + "DeleteDatabaseRequest", + "DeleteLockRequest", + "DeleteTableRequest", + "GetCatalogRequest", + "GetDatabaseRequest", + "GetTableRequest", + "HiveDatabaseOptions", + "HiveTableOptions", + "ListCatalogsRequest", + "ListCatalogsResponse", + "ListDatabasesRequest", + "ListDatabasesResponse", + "ListLocksRequest", + "ListLocksResponse", + "ListTablesRequest", + "ListTablesResponse", + "Lock", + "RenameTableRequest", + "Table", + "UpdateDatabaseRequest", + "UpdateTableRequest", + "TableView", +) diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/types/metastore.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/types/metastore.py new file mode 100644 index 000000000000..858b911b14bf --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery/biglake_v1alpha1/types/metastore.py @@ -0,0 +1,1067 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.biglake.v1alpha1", + manifest={ + "TableView", + "Catalog", + "Database", + "Table", + "Lock", + "CreateCatalogRequest", + "DeleteCatalogRequest", + "GetCatalogRequest", + "ListCatalogsRequest", + "ListCatalogsResponse", + "CreateDatabaseRequest", + "DeleteDatabaseRequest", + "UpdateDatabaseRequest", + "GetDatabaseRequest", + "ListDatabasesRequest", + "ListDatabasesResponse", + "CreateTableRequest", + "DeleteTableRequest", + "UpdateTableRequest", + "RenameTableRequest", + "GetTableRequest", + "ListTablesRequest", + "ListTablesResponse", + "CreateLockRequest", + "DeleteLockRequest", + "CheckLockRequest", + "ListLocksRequest", + "ListLocksResponse", + "HiveDatabaseOptions", + "HiveTableOptions", + }, +) + + +class TableView(proto.Enum): + r"""View on Table. Represents which fields will be populated for + calls that return Table objects. + + Values: + TABLE_VIEW_UNSPECIFIED (0): + Default value. 
The API will default to the + BASIC view. + BASIC (1): + Include only table names. + This is the default value. + FULL (2): + Include everything. + """ + TABLE_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class Catalog(proto.Message): + r"""Catalog is the container of databases. + + Attributes: + name (str): + Output only. The resource name. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the + catalog. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last modification time of + the catalog. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deletion time of the + catalog. Only set after the catalog is deleted. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this catalog is + considered expired. Only set after the catalog + is deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class Database(proto.Message): + r"""Database is the container of tables. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hive_options (google.cloud.bigquery.biglake_v1alpha1.types.HiveDatabaseOptions): + Options of a Hive database. + + This field is a member of `oneof`_ ``options``. + name (str): + Output only. The resource name. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the + database. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last modification time of + the database. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deletion time of the + database. Only set after the database is + deleted. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this database is + considered expired. Only set after the database + is deleted. + type_ (google.cloud.bigquery.biglake_v1alpha1.types.Database.Type): + The database type. + """ + + class Type(proto.Enum): + r"""The database type. + + Values: + TYPE_UNSPECIFIED (0): + The type is not specified. + HIVE (1): + Represents a database storing tables + compatible with Hive Metastore tables. 
+ """ + TYPE_UNSPECIFIED = 0 + HIVE = 1 + + hive_options: "HiveDatabaseOptions" = proto.Field( + proto.MESSAGE, + number=7, + oneof="options", + message="HiveDatabaseOptions", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + + +class Table(proto.Message): + r"""Represents a table. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hive_options (google.cloud.bigquery.biglake_v1alpha1.types.HiveTableOptions): + Options of a Hive table. + + This field is a member of `oneof`_ ``options``. + name (str): + Output only. The resource name. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the table. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last modification time of + the table. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deletion time of the table. + Only set after the table is deleted. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this table is + considered expired. Only set after the table is + deleted. + type_ (google.cloud.bigquery.biglake_v1alpha1.types.Table.Type): + The table type. + etag (str): + The checksum of a table object computed by + the server based on the value of other fields. + It may be sent on update requests to ensure the + client has an up-to-date value before + proceeding. It is only checked for update table + operations. + """ + + class Type(proto.Enum): + r"""The table type. + + Values: + TYPE_UNSPECIFIED (0): + The type is not specified. + HIVE (1): + Represents a table compatible with Hive + Metastore tables. + """ + TYPE_UNSPECIFIED = 0 + HIVE = 1 + + hive_options: "HiveTableOptions" = proto.Field( + proto.MESSAGE, + number=7, + oneof="options", + message="HiveTableOptions", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + + +class Lock(proto.Message): + r"""Represents a lock. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table_id (str): + The table ID (not fully qualified name) in + the same database that the lock will be created + on. 
The table must exist. + + This field is a member of `oneof`_ ``resources``. + name (str): + Output only. The resource name. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the lock. + type_ (google.cloud.bigquery.biglake_v1alpha1.types.Lock.Type): + The lock type. + state (google.cloud.bigquery.biglake_v1alpha1.types.Lock.State): + Output only. The lock state. + """ + + class Type(proto.Enum): + r"""The lock type. + + Values: + TYPE_UNSPECIFIED (0): + The type is not specified. + EXCLUSIVE (1): + An exclusive lock prevents another lock from + being created on the same resource. + """ + TYPE_UNSPECIFIED = 0 + EXCLUSIVE = 1 + + class State(proto.Enum): + r"""The lock state. + + Values: + STATE_UNSPECIFIED (0): + The state is not specified. + WAITING (1): + Waiting to acquire the lock. + ACQUIRED (2): + The lock has been acquired. + """ + STATE_UNSPECIFIED = 0 + WAITING = 1 + ACQUIRED = 2 + + table_id: str = proto.Field( + proto.STRING, + number=5, + oneof="resources", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +class CreateCatalogRequest(proto.Message): + r"""Request message for the CreateCatalog method. + + Attributes: + parent (str): + Required. The parent resource where this catalog will be + created. Format: + projects/{project_id_or_number}/locations/{location_id} + catalog (google.cloud.bigquery.biglake_v1alpha1.types.Catalog): + Required. The catalog to create. The ``name`` field does not + need to be provided. + catalog_id (str): + Required. The ID to use for the catalog, + which will become the final component of the + catalog's resource name. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + catalog: "Catalog" = proto.Field( + proto.MESSAGE, + number=2, + message="Catalog", + ) + catalog_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteCatalogRequest(proto.Message): + r"""Request message for the DeleteCatalog method. + + Attributes: + name (str): + Required. The name of the catalog to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetCatalogRequest(proto.Message): + r"""Request message for the GetCatalog method. + + Attributes: + name (str): + Required. The name of the catalog to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListCatalogsRequest(proto.Message): + r"""Request message for the ListCatalogs method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + catalogs. Format: + projects/{project_id_or_number}/locations/{location_id} + page_size (int): + The maximum number of catalogs to return. The + service may return fewer than this value. + If unspecified, at most 50 catalogs will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListCatalogs`` + call. 
Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListCatalogs`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListCatalogsResponse(proto.Message): + r"""Response message for the ListCatalogs method. + + Attributes: + catalogs (MutableSequence[google.cloud.bigquery.biglake_v1alpha1.types.Catalog]): + The catalogs from the specified project. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + catalogs: MutableSequence["Catalog"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Catalog", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDatabaseRequest(proto.Message): + r"""Request message for the CreateDatabase method. + + Attributes: + parent (str): + Required. The parent resource where this database will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + database (google.cloud.bigquery.biglake_v1alpha1.types.Database): + Required. The database to create. The ``name`` field does + not need to be provided. + database_id (str): + Required. The ID to use for the database, + which will become the final component of the + database's resource name. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database: "Database" = proto.Field( + proto.MESSAGE, + number=2, + message="Database", + ) + database_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteDatabaseRequest(proto.Message): + r"""Request message for the DeleteDatabase method. + + Attributes: + name (str): + Required. The name of the database to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDatabaseRequest(proto.Message): + r"""Request message for the UpdateDatabase method. + + Attributes: + database (google.cloud.bigquery.biglake_v1alpha1.types.Database): + Required. The database to update. + + The database's ``name`` field is used to identify the + database to update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + database: "Database" = proto.Field( + proto.MESSAGE, + number=1, + message="Database", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetDatabaseRequest(proto.Message): + r"""Request message for the GetDatabase method. + + Attributes: + name (str): + Required. The name of the database to retrieve. 
Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDatabasesRequest(proto.Message): + r"""Request message for the ListDatabases method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + databases. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + page_size (int): + The maximum number of databases to return. + The service may return fewer than this value. If + unspecified, at most 50 databases will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListDatabases`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListDatabases`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDatabasesResponse(proto.Message): + r"""Response message for the ListDatabases method. + + Attributes: + databases (MutableSequence[google.cloud.bigquery.biglake_v1alpha1.types.Database]): + The databases from the specified catalog. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + databases: MutableSequence["Database"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Database", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateTableRequest(proto.Message): + r"""Request message for the CreateTable method. + + Attributes: + parent (str): + Required. The parent resource where this table will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + table (google.cloud.bigquery.biglake_v1alpha1.types.Table): + Required. The table to create. The ``name`` field does not + need to be provided for the table creation. + table_id (str): + Required. The ID to use for the table, which + will become the final component of the table's + resource name. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + table: "Table" = proto.Field( + proto.MESSAGE, + number=2, + message="Table", + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteTableRequest(proto.Message): + r"""Request message for the DeleteTable method. + + Attributes: + name (str): + Required. The name of the table to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTableRequest(proto.Message): + r"""Request message for the UpdateTable method. + + Attributes: + table (google.cloud.bigquery.biglake_v1alpha1.types.Table): + Required. The table to update. + + The table's ``name`` field is used to identify the table to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. 
+ + For the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + table: "Table" = proto.Field( + proto.MESSAGE, + number=1, + message="Table", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class RenameTableRequest(proto.Message): + r"""Request message for the RenameTable method in + MetastoreService + + Attributes: + name (str): + Required. The table's ``name`` field is used to identify the + table to rename. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + new_name (str): + Required. The new ``name`` for the specified table, must be + in the same database. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTableRequest(proto.Message): + r"""Request message for the GetTable method. + + Attributes: + name (str): + Required. The name of the table to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTablesRequest(proto.Message): + r"""Request message for the ListTables method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of tables. + Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + page_size (int): + The maximum number of tables to return. The + service may return fewer than this value. + If unspecified, at most 50 tables will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListTables`` call. + Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListTables`` must match the call that provided the page + token. + view (google.cloud.bigquery.biglake_v1alpha1.types.TableView): + The view for the returned tables. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + view: "TableView" = proto.Field( + proto.ENUM, + number=4, + enum="TableView", + ) + + +class ListTablesResponse(proto.Message): + r"""Response message for the ListTables method. + + Attributes: + tables (MutableSequence[google.cloud.bigquery.biglake_v1alpha1.types.Table]): + The tables from the specified database. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + tables: MutableSequence["Table"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Table", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateLockRequest(proto.Message): + r"""Request message for the CreateLock method. + + Attributes: + parent (str): + Required. The parent resource where this lock will be + created. 
Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + lock (google.cloud.bigquery.biglake_v1alpha1.types.Lock): + Required. The lock to create. The ``name`` field does not + need to be provided for the lock creation. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + lock: "Lock" = proto.Field( + proto.MESSAGE, + number=2, + message="Lock", + ) + + +class DeleteLockRequest(proto.Message): + r"""Request message for the DeleteLock method. + + Attributes: + name (str): + Required. The name of the lock to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CheckLockRequest(proto.Message): + r"""Request message for the CheckLock method. + + Attributes: + name (str): + Required. The name of the lock to check. Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLocksRequest(proto.Message): + r"""Request message for the ListLocks method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of locks. + Format: + projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + page_size (int): + The maximum number of locks to return. The + service may return fewer than this value. + If unspecified, at most 50 locks will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListLocks`` call. + Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListLocks`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListLocksResponse(proto.Message): + r"""Response message for the ListLocks method. + + Attributes: + locks (MutableSequence[google.cloud.bigquery.biglake_v1alpha1.types.Lock]): + The locks from the specified database. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + locks: MutableSequence["Lock"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Lock", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class HiveDatabaseOptions(proto.Message): + r"""Options of a Hive database. + + Attributes: + location_uri (str): + Cloud Storage folder URI where the database + data is stored, starting with "gs://". + parameters (MutableMapping[str, str]): + Stores user supplied Hive database + parameters. + """ + + location_uri: str = proto.Field( + proto.STRING, + number=1, + ) + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + +class HiveTableOptions(proto.Message): + r"""Options of a Hive table. + + Attributes: + parameters (MutableMapping[str, str]): + Stores user supplied Hive table parameters. + table_type (str): + Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE. 
+ storage_descriptor (google.cloud.bigquery.biglake_v1alpha1.types.HiveTableOptions.StorageDescriptor): + Stores physical storage information of the + data. + """ + + class SerDeInfo(proto.Message): + r"""Serializer and deserializer information. + + Attributes: + serialization_lib (str): + The fully qualified Java class name of the + serialization library. + """ + + serialization_lib: str = proto.Field( + proto.STRING, + number=1, + ) + + class StorageDescriptor(proto.Message): + r"""Stores physical storage information of the data. + + Attributes: + location_uri (str): + Cloud Storage folder URI where the table data + is stored, starting with "gs://". + input_format (str): + The fully qualified Java class name of the + input format. + output_format (str): + The fully qualified Java class name of the + output format. + serde_info (google.cloud.bigquery.biglake_v1alpha1.types.HiveTableOptions.SerDeInfo): + Serializer and deserializer information. + """ + + location_uri: str = proto.Field( + proto.STRING, + number=1, + ) + input_format: str = proto.Field( + proto.STRING, + number=2, + ) + output_format: str = proto.Field( + proto.STRING, + number=3, + ) + serde_info: "HiveTableOptions.SerDeInfo" = proto.Field( + proto.MESSAGE, + number=4, + message="HiveTableOptions.SerDeInfo", + ) + + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + table_type: str = proto.Field( + proto.STRING, + number=2, + ) + storage_descriptor: StorageDescriptor = proto.Field( + proto.MESSAGE, + number=3, + message=StorageDescriptor, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-biglake/mypy.ini b/packages/google-cloud-bigquery-biglake/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-bigquery-biglake/noxfile.py b/packages/google-cloud-bigquery-biglake/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
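+#
+# Typical local invocations (a sketch; the session names are defined in
+# this file):
+#
+#   pip install nox
+#   nox -s lint blacken        # style checks / autoformatting
+#   nox -s unit-3.11           # unit tests on one interpreter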
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. 
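+    # Constraints are resolved per interpreter; as noted in
+    # prerelease_deps below, the file for the oldest supported Python
+    # pins all dependencies and extras at their minimum versions.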
+ + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
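+
+    Note that the per-interpreter unit sessions run with
+    ``--cov-fail-under=0`` and ``--cov-append``, so the 100% threshold
+    is only enforced on the combined report produced here.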
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
+        # See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-cloud-bigquery-biglake/renovate.json b/packages/google-cloud-bigquery-biglake/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-bigquery-biglake/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_async.py
new file mode 100644
index 000000000000..29bcd4af1f3a
--- /dev/null
+++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateCatalog
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1_generated_MetastoreService_CreateCatalog_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
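+# - It assumes the BigLake API is enabled and application default
+#   credentials are available; "parent_value" stands in for a location
+#   resource name, assumed to follow projects/{project}/locations/{location}.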
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_create_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.CreateCatalogRequest( + parent="parent_value", + catalog_id="catalog_id_value", + ) + + # Make the request + response = await client.create_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_CreateCatalog_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_sync.py new file mode 100644 index 000000000000..4e3fda6b06f4 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_CreateCatalog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_create_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.CreateCatalogRequest( + parent="parent_value", + catalog_id="catalog_id_value", + ) + + # Make the request + response = client.create_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_CreateCatalog_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_async.py new file mode 100644 index 000000000000..aeb64a8a95b3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_CreateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
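+# - "parent_value" here stands in for the catalog that will own the new
+#   database, assumed to follow
+#   projects/{project}/locations/{location}/catalogs/{catalog}.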
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_create_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_CreateDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_sync.py new file mode 100644 index 000000000000..1280f6d40c42 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_CreateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_create_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_CreateDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_async.py new file mode 100644 index 000000000000..a2281594e2bd --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_CreateTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
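+# - "parent_value" here stands in for the database that will own the new
+#   table, assumed to end in .../catalogs/{catalog}/databases/{database}.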
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_create_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = await client.create_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_CreateTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_sync.py new file mode 100644 index 000000000000..c6292c215665 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_CreateTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_create_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = client.create_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_CreateTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_async.py new file mode 100644 index 000000000000..66408439e940 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_DeleteCatalog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
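+# - "name_value" stands in for the full catalog resource name; deletion is
+#   not reversible, so verify the name before running this against a real
+#   project.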
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_delete_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_DeleteCatalog_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_sync.py new file mode 100644 index 000000000000..b1b3d2759d47 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_DeleteCatalog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_delete_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.delete_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_DeleteCatalog_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_async.py new file mode 100644 index 000000000000..1d51834fe4b1 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_DeleteDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_delete_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_DeleteDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_sync.py new file mode 100644 index 000000000000..a4b1cf6315e3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_DeleteDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_delete_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.delete_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_DeleteDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_async.py new file mode 100644 index 000000000000..b80279906b16 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_DeleteTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_delete_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_DeleteTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_sync.py new file mode 100644 index 000000000000..1a589e16b5ed --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_DeleteTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_delete_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = client.delete_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_DeleteTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_async.py new file mode 100644 index 000000000000..46ff6b012b52 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_GetCatalog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
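+# - The call is read-only; if the named catalog does not exist, the client
+#   raises google.api_core.exceptions.NotFound.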
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_get_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.get_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_GetCatalog_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_sync.py new file mode 100644 index 000000000000..50794f0a6d8d --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_GetCatalog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_get_catalog(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.get_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_GetCatalog_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_async.py new file mode 100644 index 000000000000..d8de2807e9f7 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_GetDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_get_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_GetDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_sync.py new file mode 100644 index 000000000000..dc5d9846a679 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_GetDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_get_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_GetDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_async.py new file mode 100644 index 000000000000..f8b08935890e --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_GetTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_get_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = await client.get_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_GetTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_sync.py new file mode 100644 index 000000000000..0e275b0d386d --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_GetTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_get_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = client.get_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_GetTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_async.py new file mode 100644 index 000000000000..d7c33f0e9bfd --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCatalogs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_ListCatalogs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
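+# - The paged call below returns an async pager and must be awaited before
+#   iterating with "async for"; iteration may fetch further pages lazily.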
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.bigquery import biglake_v1
+
+
+async def sample_list_catalogs():
+    # Create a client
+    client = biglake_v1.MetastoreServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = biglake_v1.ListCatalogsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async paged call must be awaited to get the pager)
+    page_result = await client.list_catalogs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END biglake_v1_generated_MetastoreService_ListCatalogs_async]
diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_sync.py
new file mode 100644
index 000000000000..828b60808a01
--- /dev/null
+++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListCatalogs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1_generated_MetastoreService_ListCatalogs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_list_catalogs(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.ListCatalogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_catalogs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END biglake_v1_generated_MetastoreService_ListCatalogs_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_async.py new file mode 100644 index 000000000000..39f17d61b7b6 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_ListDatabases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.bigquery import biglake_v1
+
+
+async def sample_list_databases():
+    # Create a client
+    client = biglake_v1.MetastoreServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = biglake_v1.ListDatabasesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async paged call must be awaited to get the pager)
+    page_result = await client.list_databases(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END biglake_v1_generated_MetastoreService_ListDatabases_async]
diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_sync.py
new file mode 100644
index 000000000000..e27d84b0b8fc
--- /dev/null
+++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatabases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1_generated_MetastoreService_ListDatabases_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_list_databases(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END biglake_v1_generated_MetastoreService_ListDatabases_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_async.py new file mode 100644 index 000000000000..7324c26a8d57 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTables +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_ListTables_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
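+# - ListTablesRequest also accepts optional page_size and page_token fields
+#   to bound each page of results (standard list-method paging).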
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.bigquery import biglake_v1
+
+
+async def sample_list_tables():
+    # Create a client
+    client = biglake_v1.MetastoreServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = biglake_v1.ListTablesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async paged call must be awaited to get the pager)
+    page_result = await client.list_tables(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END biglake_v1_generated_MetastoreService_ListTables_async]
diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_sync.py
new file mode 100644
index 000000000000..4bc5dcb71c3e
--- /dev/null
+++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListTables
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1_generated_MetastoreService_ListTables_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_list_tables(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.ListTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END biglake_v1_generated_MetastoreService_ListTables_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_async.py new file mode 100644 index 000000000000..278f501d6508 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_RenameTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
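+#   As an illustrative sketch only (hypothetical resource names), "name_value"
+#   stands for the full resource name of the existing table and
+#   "new_name_value" for its new name, expected to stay in the same database:
+#
+#       request = biglake_v1.RenameTableRequest(
+#           name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/tables/old-table",
+#           new_name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/tables/new-table",
+#       )
+#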
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_rename_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.RenameTableRequest( + name="name_value", + new_name="new_name_value", + ) + + # Make the request + response = await client.rename_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_RenameTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_sync.py new file mode 100644 index 000000000000..e2e9c15bec69 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_RenameTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_rename_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.RenameTableRequest( + name="name_value", + new_name="new_name_value", + ) + + # Make the request + response = client.rename_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_RenameTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_async.py new file mode 100644 index 000000000000..0a6751ce4cb6 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_UpdateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
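+#   The generated sample below leaves UpdateDatabaseRequest empty; as a
+#   hedged sketch following the usual update pattern (field names should be
+#   checked against the request type), a real call would typically carry the
+#   database to update, identified by its resource name:
+#
+#       request = biglake_v1.UpdateDatabaseRequest(
+#           database=biglake_v1.Database(
+#               name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database",
+#           ),
+#       )
+#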
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_update_database(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateDatabaseRequest( + ) + + # Make the request + response = await client.update_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_UpdateDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_sync.py new file mode 100644 index 000000000000..7df7f9a7ad6e --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_UpdateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_update_database(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateDatabaseRequest( + ) + + # Make the request + response = client.update_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_UpdateDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_async.py new file mode 100644 index 000000000000..738e43ce9bea --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_UpdateTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
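+#   The generated sample below leaves UpdateTableRequest empty; as a hedged
+#   sketch following the usual update pattern (field names should be checked
+#   against the request type), a real call would typically carry the table to
+#   update, identified by its resource name:
+#
+#       request = biglake_v1.UpdateTableRequest(
+#           table=biglake_v1.Table(
+#               name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/tables/my-table",
+#           ),
+#       )
+#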
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +async def sample_update_table(): + # Create a client + client = biglake_v1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateTableRequest( + ) + + # Make the request + response = await client.update_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_UpdateTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_sync.py new file mode 100644 index 000000000000..5f4c0de22d1d --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1_generated_MetastoreService_UpdateTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1 + + +def sample_update_table(): + # Create a client + client = biglake_v1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1.UpdateTableRequest( + ) + + # Make the request + response = client.update_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1_generated_MetastoreService_UpdateTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_async.py new file mode 100644 index 000000000000..fdd903970a41 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckLock +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CheckLock_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
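+#   As an illustrative sketch only (hypothetical resource name; lock IDs are
+#   assigned by the service when the lock is created), "name_value" stands
+#   for the lock to check:
+#
+#       request = biglake_v1alpha1.CheckLockRequest(
+#           name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/locks/my-lock",
+#       )
+#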
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_check_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CheckLockRequest( + name="name_value", + ) + + # Make the request + response = await client.check_lock(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CheckLock_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_sync.py new file mode 100644 index 000000000000..04835f639b88 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckLock +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CheckLock_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_check_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CheckLockRequest( + name="name_value", + ) + + # Make the request + response = client.check_lock(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CheckLock_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_async.py new file mode 100644 index 000000000000..87ec2b389573 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
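+#   As an illustrative sketch only (hypothetical values), the parent is a
+#   project location and "catalog_id" becomes the last segment of the new
+#   catalog's resource name:
+#
+#       request = biglake_v1alpha1.CreateCatalogRequest(
+#           parent="projects/my-project/locations/us",
+#           catalog_id="my-catalog",
+#       )
+#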
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_create_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateCatalogRequest( + parent="parent_value", + catalog_id="catalog_id_value", + ) + + # Make the request + response = await client.create_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py new file mode 100644 index 000000000000..2880c5d14b09 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_create_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateCatalogRequest( + parent="parent_value", + catalog_id="catalog_id_value", + ) + + # Make the request + response = client.create_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_async.py new file mode 100644 index 000000000000..19f161112382 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
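+#   As an illustrative sketch only (hypothetical values), the parent is the
+#   catalog that will hold the new database:
+#
+#       request = biglake_v1alpha1.CreateDatabaseRequest(
+#           parent="projects/my-project/locations/us/catalogs/my-catalog",
+#           database_id="my-database",
+#       )
+#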
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_create_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_sync.py new file mode 100644 index 000000000000..76632d859131 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_create_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_async.py new file mode 100644 index 000000000000..6fe318883fba --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLock +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateLock_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
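+#   As an illustrative sketch only (hypothetical values), the parent is the
+#   database holding the table, and "table_id" is the table's short ID rather
+#   than its full resource name:
+#
+#       lock = biglake_v1alpha1.Lock(table_id="my-table")
+#       request = biglake_v1alpha1.CreateLockRequest(
+#           parent="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database",
+#           lock=lock,
+#       )
+#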
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_create_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + lock = biglake_v1alpha1.Lock() + lock.table_id = "table_id_value" + + request = biglake_v1alpha1.CreateLockRequest( + parent="parent_value", + lock=lock, + ) + + # Make the request + response = await client.create_lock(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateLock_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_sync.py new file mode 100644 index 000000000000..6030b8b02877 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLock +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateLock_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_create_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + lock = biglake_v1alpha1.Lock() + lock.table_id = "table_id_value" + + request = biglake_v1alpha1.CreateLockRequest( + parent="parent_value", + lock=lock, + ) + + # Make the request + response = client.create_lock(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateLock_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_async.py new file mode 100644 index 000000000000..8dd714d1acb2 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
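+#   As an illustrative sketch only (hypothetical values), the parent is the
+#   database that will hold the new table:
+#
+#       request = biglake_v1alpha1.CreateTableRequest(
+#           parent="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database",
+#           table_id="my-table",
+#       )
+#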
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_create_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = await client.create_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_sync.py new file mode 100644 index 000000000000..b48c2b61b452 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_CreateTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_create_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Make the request + response = client.create_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_CreateTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py new file mode 100644 index 000000000000..2a8e98ba2b9f --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
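+#   As an illustrative sketch only (hypothetical resource name), "name_value"
+#   stands for the catalog to delete; the sample below prints the response,
+#   which carries the deleted catalog:
+#
+#       request = biglake_v1alpha1.DeleteCatalogRequest(
+#           name="projects/my-project/locations/us/catalogs/my-catalog",
+#       )
+#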
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_delete_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py new file mode 100644 index 000000000000..c618f13f57fb --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_delete_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.delete_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_async.py new file mode 100644 index 000000000000..f091a48b302f --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
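+#   As an illustrative sketch only (hypothetical resource name), "name_value"
+#   stands for the database to delete:
+#
+#       request = biglake_v1alpha1.DeleteDatabaseRequest(
+#           name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database",
+#       )
+#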
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_delete_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_sync.py new file mode 100644 index 000000000000..ec80d6779273 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_delete_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.delete_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_async.py new file mode 100644 index 000000000000..dd626b742077 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLock +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
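+#   As an illustrative sketch only (hypothetical resource name), "name_value"
+#   stands for the lock to release; note the sample below handles no
+#   response, as the call returns nothing on success:
+#
+#       request = biglake_v1alpha1.DeleteLockRequest(
+#           name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/locks/my-lock",
+#       )
+#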
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_delete_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteLockRequest( + name="name_value", + ) + + # Make the request + await client.delete_lock(request=request) + + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py new file mode 100644 index 000000000000..17c9052e119e --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLock +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_delete_lock(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteLockRequest( + name="name_value", + ) + + # Make the request + client.delete_lock(request=request) + + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_async.py new file mode 100644 index 000000000000..f0b112f22177 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
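+#   As an illustrative sketch only (hypothetical resource name), "name_value"
+#   stands for the table to delete:
+#
+#       request = biglake_v1alpha1.DeleteTableRequest(
+#           name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/tables/my-table",
+#       )
+#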
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_delete_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_sync.py new file mode 100644 index 000000000000..365c90460b05 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_delete_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.DeleteTableRequest( + name="name_value", + ) + + # Make the request + response = client.delete_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_async.py new file mode 100644 index 000000000000..422f76dc8634 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
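+# - A minimal sketch of overriding the endpoint at client construction, for
+#   the regional-endpoint case noted below (ClientOptions is the real
+#   google-api-core class; the hostname here is only a placeholder):
+#
+#       from google.api_core.client_options import ClientOptions
+#
+#       client = biglake_v1alpha1.MetastoreServiceAsyncClient(
+#           client_options=ClientOptions(api_endpoint="biglake.googleapis.com")
+#       )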
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_get_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = await client.get_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py new file mode 100644 index 000000000000..4499fdae7c53 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_get_catalog(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetCatalogRequest( + name="name_value", + ) + + # Make the request + response = client.get_catalog(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_async.py new file mode 100644 index 000000000000..8921ef2b991b --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_get_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_sync.py new file mode 100644 index 000000000000..f32fc245fef5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_get_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_async.py new file mode 100644 index 000000000000..7e8f3a06de17 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_GetTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
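+# - "name_value" placeholders stand for full resource names. A sketch using
+#   the client's generated path helper (the helper name and argument order
+#   follow the usual GAPIC conventions and are assumptions here):
+#
+#       name = biglake_v1alpha1.MetastoreServiceClient.table_path(
+#           "my-project", "us-central1", "my-catalog", "my-database", "my-table",
+#       )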
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_get_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = await client.get_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_GetTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_sync.py new file mode 100644 index 000000000000..680e133a2176 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_GetTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_get_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.GetTableRequest( + name="name_value", + ) + + # Make the request + response = client.get_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_GetTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py new file mode 100644 index 000000000000..78fc7b7d5128 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCatalogs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.bigquery import biglake_v1alpha1
+
+
+async def sample_list_catalogs():
+    # Create a client
+    client = biglake_v1alpha1.MetastoreServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = biglake_v1alpha1.ListCatalogsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_catalogs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py new file mode 100644 index 000000000000..8f55ed8d7e35 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListCatalogs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_list_catalogs(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListCatalogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_catalogs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_async.py new file mode 100644 index 000000000000..4b11c6d4b0f3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
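+# - The list call below returns a pager that keeps fetching pages on demand;
+#   page-level iteration is also possible. A sketch (the .pages attribute
+#   follows the usual GAPIC pager conventions and is an assumption here):
+#
+#       page_result = await client.list_databases(request=request)
+#       async for page in page_result.pages:
+#           print(page.next_page_token)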
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.bigquery import biglake_v1alpha1
+
+
+async def sample_list_databases():
+    # Create a client
+    client = biglake_v1alpha1.MetastoreServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = biglake_v1alpha1.ListDatabasesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_databases(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_sync.py new file mode 100644 index 000000000000..020716f39aa5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatabases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_list_databases(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_async.py new file mode 100644 index 000000000000..24f54febd605 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLocks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_ListLocks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.bigquery import biglake_v1alpha1
+
+
+async def sample_list_locks():
+    # Create a client
+    client = biglake_v1alpha1.MetastoreServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = biglake_v1alpha1.ListLocksRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_locks(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END biglake_v1alpha1_generated_MetastoreService_ListLocks_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_sync.py new file mode 100644 index 000000000000..7c08e94944d3 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLocks
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1alpha1_generated_MetastoreService_ListLocks_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_list_locks(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListLocksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_locks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_ListLocks_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_async.py new file mode 100644 index 000000000000..2f55658b9780 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTables +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_ListTables_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.bigquery import biglake_v1alpha1
+
+
+async def sample_list_tables():
+    # Create a client
+    client = biglake_v1alpha1.MetastoreServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = biglake_v1alpha1.ListTablesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_tables(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END biglake_v1alpha1_generated_MetastoreService_ListTables_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_sync.py new file mode 100644 index 000000000000..8c0c38ae34bb --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListTables
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-bigquery-biglake
+
+
+# [START biglake_v1alpha1_generated_MetastoreService_ListTables_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_list_tables(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.ListTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tables(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_ListTables_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_async.py new file mode 100644 index 000000000000..1fd41107bdf5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_RenameTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_rename_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.RenameTableRequest( + name="name_value", + new_name="new_name_value", + ) + + # Make the request + response = await client.rename_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_RenameTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_sync.py new file mode 100644 index 000000000000..4262c387d158 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_RenameTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_rename_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.RenameTableRequest( + name="name_value", + new_name="new_name_value", + ) + + # Make the request + response = client.rename_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_RenameTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_async.py new file mode 100644 index 000000000000..3957ee8ef208 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
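+# - Update requests are typically populated with the resource to change plus a
+#   field mask naming the fields to overwrite. A sketch (the masked path is an
+#   illustrative assumption):
+#
+#       from google.protobuf import field_mask_pb2
+#
+#       request = biglake_v1alpha1.UpdateDatabaseRequest(
+#           database=biglake_v1alpha1.Database(name="name_value"),
+#           update_mask=field_mask_pb2.FieldMask(paths=["hive_options"]),
+#       )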
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_update_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateDatabaseRequest( + ) + + # Make the request + response = await client.update_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_sync.py new file mode 100644 index 000000000000..2158e0b1f965 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_update_database(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateDatabaseRequest( + ) + + # Make the request + response = client.update_database(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_async.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_async.py new file mode 100644 index 000000000000..55e2691061e9 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_UpdateTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
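+# - Per-call timeout and retry policies can be passed alongside the request
+#   (the snippet metadata in this package lists both parameters); a sketch
+#   with an illustrative timeout only:
+#
+#       response = await client.update_table(request=request, timeout=30.0)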
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +async def sample_update_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceAsyncClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateTableRequest( + ) + + # Make the request + response = await client.update_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_UpdateTable_async] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_sync.py b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_sync.py new file mode 100644 index 000000000000..8bc3db1ec7d7 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-biglake + + +# [START biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.bigquery import biglake_v1alpha1 + + +def sample_update_table(): + # Create a client + client = biglake_v1alpha1.MetastoreServiceClient() + + # Initialize request argument(s) + request = biglake_v1alpha1.UpdateTableRequest( + ) + + # Make the request + response = client.update_table(request=request) + + # Handle the response + print(response) + +# [END biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync] diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json new file mode 100644 index 000000000000..6dd6ed3107fe --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json @@ -0,0 +1,2502 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.biglake.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-biglake", + "version": "0.3.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.create_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.CreateCatalogRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "catalog", + "type": "google.cloud.bigquery.biglake_v1.types.Catalog" + }, + { + "name": "catalog_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Catalog", + "shortName": "create_catalog" + }, + "description": "Sample for CreateCatalog", + "file": "biglake_v1_generated_metastore_service_create_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_CreateCatalog_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_create_catalog_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.create_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateCatalog", + "service": { 
+ "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.CreateCatalogRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "catalog", + "type": "google.cloud.bigquery.biglake_v1.types.Catalog" + }, + { + "name": "catalog_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Catalog", + "shortName": "create_catalog" + }, + "description": "Sample for CreateCatalog", + "file": "biglake_v1_generated_metastore_service_create_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_CreateCatalog_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_create_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.create_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "biglake_v1_generated_metastore_service_create_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_CreateDatabase_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_create_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + 
"fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.create_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "biglake_v1_generated_metastore_service_create_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_CreateDatabase_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_create_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.create_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.CreateTableRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1.types.Table" + }, + { + "name": "table_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "create_table" + }, + "description": "Sample for CreateTable", + "file": "biglake_v1_generated_metastore_service_create_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_CreateTable_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_create_table_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.create_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.CreateTableRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1.types.Table" + }, + { + "name": "table_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "create_table" + }, + "description": "Sample for CreateTable", + "file": "biglake_v1_generated_metastore_service_create_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_CreateTable_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_create_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.delete_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.DeleteCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Catalog", + "shortName": "delete_catalog" + }, + "description": "Sample for DeleteCatalog", + "file": "biglake_v1_generated_metastore_service_delete_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_DeleteCatalog_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_delete_catalog_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.delete_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.DeleteCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Catalog", + "shortName": "delete_catalog" + }, + "description": "Sample for DeleteCatalog", + "file": "biglake_v1_generated_metastore_service_delete_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_DeleteCatalog_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_delete_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.delete_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.DeleteDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "delete_database" + }, + "description": "Sample for DeleteDatabase", + "file": "biglake_v1_generated_metastore_service_delete_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_DeleteDatabase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_delete_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.delete_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.DeleteDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "delete_database" + }, + "description": "Sample for DeleteDatabase", + "file": "biglake_v1_generated_metastore_service_delete_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_DeleteDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_delete_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.delete_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.DeleteTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "delete_table" + }, + "description": "Sample for DeleteTable", + "file": "biglake_v1_generated_metastore_service_delete_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_DeleteTable_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_delete_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": 
"google.cloud.bigquery.biglake_v1.MetastoreServiceClient.delete_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.DeleteTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "delete_table" + }, + "description": "Sample for DeleteTable", + "file": "biglake_v1_generated_metastore_service_delete_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_DeleteTable_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_delete_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.get_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.GetCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Catalog", + "shortName": "get_catalog" + }, + "description": "Sample for GetCatalog", + "file": "biglake_v1_generated_metastore_service_get_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_GetCatalog_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_get_catalog_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.get_catalog", + "method": { + "fullName": 
"google.cloud.bigquery.biglake.v1.MetastoreService.GetCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.GetCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Catalog", + "shortName": "get_catalog" + }, + "description": "Sample for GetCatalog", + "file": "biglake_v1_generated_metastore_service_get_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_GetCatalog_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_get_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.get_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "biglake_v1_generated_metastore_service_get_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_GetDatabase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_get_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.get_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + 
"shortName": "MetastoreService" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "biglake_v1_generated_metastore_service_get_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_GetDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_get_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.get_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.GetTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "get_table" + }, + "description": "Sample for GetTable", + "file": "biglake_v1_generated_metastore_service_get_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_GetTable_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_get_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.get_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.GetTableRequest" 
+ }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "get_table" + }, + "description": "Sample for GetTable", + "file": "biglake_v1_generated_metastore_service_get_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_GetTable_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_get_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.list_catalogs", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListCatalogs", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListCatalogs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.ListCatalogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListCatalogsAsyncPager", + "shortName": "list_catalogs" + }, + "description": "Sample for ListCatalogs", + "file": "biglake_v1_generated_metastore_service_list_catalogs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_ListCatalogs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_list_catalogs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.list_catalogs", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListCatalogs", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListCatalogs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.ListCatalogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListCatalogsPager", + "shortName": "list_catalogs" + }, + "description": "Sample for ListCatalogs", + "file": "biglake_v1_generated_metastore_service_list_catalogs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_ListCatalogs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_list_catalogs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.list_databases", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListDatabases", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListDatabasesAsyncPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "biglake_v1_generated_metastore_service_list_databases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_ListDatabases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_list_databases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.list_databases", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListDatabases", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListDatabasesPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "biglake_v1_generated_metastore_service_list_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_ListDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_list_databases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.list_tables", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListTables", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListTables" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.ListTablesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListTablesAsyncPager", + "shortName": "list_tables" + }, + "description": "Sample for ListTables", + "file": "biglake_v1_generated_metastore_service_list_tables_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_ListTables_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_list_tables_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.list_tables", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListTables", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListTables" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.ListTablesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.services.metastore_service.pagers.ListTablesPager", + "shortName": "list_tables" + }, + "description": "Sample for ListTables", + "file": "biglake_v1_generated_metastore_service_list_tables_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_ListTables_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_list_tables_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.rename_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.RenameTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "RenameTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.RenameTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "rename_table" + }, + "description": "Sample for RenameTable", + "file": "biglake_v1_generated_metastore_service_rename_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_RenameTable_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_rename_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.rename_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.RenameTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "RenameTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.RenameTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, 
+ { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "rename_table" + }, + "description": "Sample for RenameTable", + "file": "biglake_v1_generated_metastore_service_rename_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_RenameTable_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_rename_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.update_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "biglake_v1_generated_metastore_service_update_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_UpdateDatabase_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_update_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.update_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1.types.Database" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Database", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "biglake_v1_generated_metastore_service_update_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_UpdateDatabase_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_update_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient.update_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.UpdateTableRequest" + }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1.types.Table" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "update_table" + }, + "description": "Sample for UpdateTable", + "file": "biglake_v1_generated_metastore_service_update_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_UpdateTable_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_update_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1.MetastoreServiceClient.update_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1.types.UpdateTableRequest" 
+ }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1.types.Table" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1.types.Table", + "shortName": "update_table" + }, + "description": "Sample for UpdateTable", + "file": "biglake_v1_generated_metastore_service_update_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1_generated_MetastoreService_UpdateTable_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1_generated_metastore_service_update_table_sync.py" + } + ] +} diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json new file mode 100644 index 000000000000..49f41361350f --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json @@ -0,0 +1,3148 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.biglake.v1alpha1", + "version": "v1alpha1" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-biglake", + "version": "0.3.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.check_lock", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CheckLock", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CheckLock" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CheckLockRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Lock", + "shortName": "check_lock" + }, + "description": "Sample for CheckLock", + "file": "biglake_v1alpha1_generated_metastore_service_check_lock_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CheckLock_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_check_lock_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.check_lock", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CheckLock", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CheckLock" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CheckLockRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Lock", + "shortName": "check_lock" + }, + "description": "Sample for CheckLock", + "file": "biglake_v1alpha1_generated_metastore_service_check_lock_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CheckLock_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_check_lock_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.create_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateCatalogRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "catalog", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog" + }, + { + "name": "catalog_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog", + "shortName": "create_catalog" + }, + "description": "Sample for CreateCatalog", + "file": "biglake_v1alpha1_generated_metastore_service_create_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + 
}, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_catalog_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateCatalogRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "catalog", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog" + }, + { + "name": "catalog_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog", + "shortName": "create_catalog" + }, + "description": "Sample for CreateCatalog", + "file": "biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.create_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_create_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_create_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.create_lock", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateLock", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateLock" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateLockRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lock", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Lock" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bigquery.biglake_v1alpha1.types.Lock", + "shortName": "create_lock" + }, + "description": "Sample for CreateLock", + "file": "biglake_v1alpha1_generated_metastore_service_create_lock_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateLock_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_lock_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_lock", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateLock", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateLock" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateLockRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lock", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Lock" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Lock", + "shortName": "create_lock" + }, + "description": "Sample for CreateLock", + "file": "biglake_v1alpha1_generated_metastore_service_create_lock_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateLock_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_lock_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.create_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateTableRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Table" + }, + { + "name": "table_id", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "create_table" + }, + "description": "Sample for CreateTable", + "file": "biglake_v1alpha1_generated_metastore_service_create_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateTable_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "CreateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.CreateTableRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Table" + }, + { + "name": "table_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "create_table" + }, + "description": "Sample for CreateTable", + "file": "biglake_v1alpha1_generated_metastore_service_create_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateTable_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_create_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.delete_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteCatalog" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.bigquery.biglake_v1alpha1.types.DeleteCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog", + "shortName": "delete_catalog" + }, + "description": "Sample for DeleteCatalog", + "file": "biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.delete_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.DeleteCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog", + "shortName": "delete_catalog" + }, + "description": "Sample for DeleteCatalog", + "file": "biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.delete_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteDatabase" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.bigquery.biglake_v1alpha1.types.DeleteDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "delete_database" + }, + "description": "Sample for DeleteDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_delete_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.delete_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.DeleteDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "delete_database" + }, + "description": "Sample for DeleteDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_delete_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.delete_lock", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteLock", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteLock" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.DeleteLockRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_lock" + }, + "description": "Sample for DeleteLock", + "file": "biglake_v1alpha1_generated_metastore_service_delete_lock_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteLock_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_lock_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.delete_lock", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteLock", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteLock" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.DeleteLockRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_lock" + }, + "description": "Sample for DeleteLock", + "file": "biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.delete_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.DeleteTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, 
+ { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "delete_table" + }, + "description": "Sample for DeleteTable", + "file": "biglake_v1alpha1_generated_metastore_service_delete_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteTable_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.delete_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "DeleteTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.DeleteTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "delete_table" + }, + "description": "Sample for DeleteTable", + "file": "biglake_v1alpha1_generated_metastore_service_delete_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_delete_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.get_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.GetCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog", + "shortName": "get_catalog" + }, + "description": "Sample for GetCatalog", + "file": "biglake_v1alpha1_generated_metastore_service_get_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetCatalog_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_get_catalog_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.get_catalog", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetCatalog", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.GetCatalogRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Catalog", + "shortName": "get_catalog" + }, + "description": "Sample for GetCatalog", + "file": "biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.get_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_get_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetDatabase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_get_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.get_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_get_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_get_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.get_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.GetTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "get_table" + }, + "description": "Sample for GetTable", + "file": "biglake_v1alpha1_generated_metastore_service_get_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetTable_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_get_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.get_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "GetTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.GetTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "get_table" + }, + "description": "Sample for GetTable", + "file": "biglake_v1alpha1_generated_metastore_service_get_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetTable_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_get_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.list_catalogs", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListCatalogs", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListCatalogs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsAsyncPager", + "shortName": "list_catalogs" + }, + "description": "Sample for ListCatalogs", + "file": "biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_catalogs", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListCatalogs", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListCatalogs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListCatalogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsPager", + "shortName": "list_catalogs" + }, + "description": "Sample for ListCatalogs", + "file": "biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.list_databases", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListDatabases", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesAsyncPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "biglake_v1alpha1_generated_metastore_service_list_databases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListDatabases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_databases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_databases", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListDatabases", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "biglake_v1alpha1_generated_metastore_service_list_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_databases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.list_locks", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListLocks", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListLocks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListLocksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListLocksAsyncPager", + "shortName": "list_locks" + }, + "description": "Sample for ListLocks", + "file": "biglake_v1alpha1_generated_metastore_service_list_locks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListLocks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_locks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_locks", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListLocks", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListLocks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListLocksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListLocksPager", + "shortName": "list_locks" + }, + "description": "Sample for ListLocks", + "file": "biglake_v1alpha1_generated_metastore_service_list_locks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListLocks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_locks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.list_tables", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListTables", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListTables" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListTablesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListTablesAsyncPager", + "shortName": "list_tables" + }, + "description": "Sample for ListTables", + "file": "biglake_v1alpha1_generated_metastore_service_list_tables_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListTables_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_tables_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_tables", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListTables", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "ListTables" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.ListTablesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.pagers.ListTablesPager", + "shortName": "list_tables" + }, + "description": "Sample for ListTables", + "file": "biglake_v1alpha1_generated_metastore_service_list_tables_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListTables_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_list_tables_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.rename_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.RenameTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "RenameTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.RenameTableRequest" + }, + { + "name": "name", + 
"type": "str" + }, + { + "name": "new_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "rename_table" + }, + "description": "Sample for RenameTable", + "file": "biglake_v1alpha1_generated_metastore_service_rename_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_RenameTable_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_rename_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.rename_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.RenameTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "RenameTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.RenameTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "rename_table" + }, + "description": "Sample for RenameTable", + "file": "biglake_v1alpha1_generated_metastore_service_rename_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_RenameTable_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_rename_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.update_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.bigquery.biglake_v1alpha1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_update_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_update_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.update_database", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateDatabase", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Database", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "biglake_v1alpha1_generated_metastore_service_update_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_update_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient", + "shortName": "MetastoreServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient.update_table", + "method": { + 
"fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.UpdateTableRequest" + }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Table" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "update_table" + }, + "description": "Sample for UpdateTable", + "file": "biglake_v1alpha1_generated_metastore_service_update_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateTable_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_update_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient", + "shortName": "MetastoreServiceClient" + }, + "fullName": "google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.update_table", + "method": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateTable", + "service": { + "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", + "shortName": "MetastoreService" + }, + "shortName": "UpdateTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.UpdateTableRequest" + }, + { + "name": "table", + "type": "google.cloud.bigquery.biglake_v1alpha1.types.Table" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery.biglake_v1alpha1.types.Table", + "shortName": "update_table" + }, + "description": "Sample for UpdateTable", + "file": "biglake_v1alpha1_generated_metastore_service_update_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "biglake_v1alpha1_generated_metastore_service_update_table_sync.py" + } + ] +} diff --git a/packages/google-cloud-bigquery-biglake/scripts/decrypt-secrets.sh 
b/packages/google-cloud-bigquery-biglake/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..21f6d2a26d90
--- /dev/null
+++ b/packages/google-cloud-bigquery-biglake/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd "$ROOT"
+
+# Prevent the script from overwriting existing files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are expected to prepare these files themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+    --project="${PROJECT_ID}" \
+    > testing/test-env.sh
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-service-account" \
+    --project="${PROJECT_ID}" \
+    > testing/service-account.json
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-client-secrets" \
+    --project="${PROJECT_ID}" \
+    > testing/client-secrets.json
diff --git a/packages/google-cloud-bigquery-biglake/scripts/fixup_biglake_v1_keywords.py b/packages/google-cloud-bigquery-biglake/scripts/fixup_biglake_v1_keywords.py
new file mode 100644
index 000000000000..8c61aee37fb8
--- /dev/null
+++ b/packages/google-cloud-bigquery-biglake/scripts/fixup_biglake_v1_keywords.py
@@ -0,0 +1,190 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
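+# A note on the transformation this script performs: calls that pass API
+# fields positionally are rewritten into the keyword-only `request` form
+# used by the generated clients, while `retry`, `timeout`, and `metadata`
+# are kept as separate control keywords. A rough sketch, using the
+# `create_catalog` entry from METHOD_TO_PARAMS below (the `client` variable
+# and the argument values are illustrative, not part of this script):
+#
+#   # Before: fields passed positionally (old flattened surface).
+#   client.create_catalog("projects/p/locations/us", catalog, "my_catalog")
+#
+#   # After: the same call, un-flattened into a single request dict.
+#   client.create_catalog(
+#       request={
+#           "parent": "projects/p/locations/us",
+#           "catalog": catalog,
+#           "catalog_id": "my_catalog",
+#       }
+#   )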
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class biglakeCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
+        'create_catalog': ('parent', 'catalog', 'catalog_id', ),
+        'create_database': ('parent', 'database', 'database_id', ),
+        'create_table': ('parent', 'table', 'table_id', ),
+        'delete_catalog': ('name', ),
+        'delete_database': ('name', ),
+        'delete_table': ('name', ),
+        'get_catalog': ('name', ),
+        'get_database': ('name', ),
+        'get_table': ('name', ),
+        'list_catalogs': ('parent', 'page_size', 'page_token', ),
+        'list_databases': ('parent', 'page_size', 'page_token', ),
+        'list_tables': ('parent', 'page_size', 'page_token', 'view', ),
+        'rename_table': ('name', 'new_name', ),
+        'update_database': ('database', 'update_mask', ),
+        'update_table': ('table', 'update_mask', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=biglakeCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the biglake client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-biglake/scripts/fixup_biglake_v1alpha1_keywords.py b/packages/google-cloud-bigquery-biglake/scripts/fixup_biglake_v1alpha1_keywords.py new file mode 100644 index 000000000000..14a629110e4d --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/scripts/fixup_biglake_v1alpha1_keywords.py @@ -0,0 +1,194 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
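Both fixup scripts share the partition helper defined above; it is a stable, out-of-place split, returning the matching elements first with relative order preserved. A minimal self-check of that contract:

from typing import Any, Callable, List, Sequence, Tuple


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any],
) -> Tuple[List[Any], List[Any]]:
    """Stable, out-of-place partition (same body as the fixup scripts)."""
    results = ([], [])
    for i in iterator:
        results[int(predicate(i))].append(i)
    return results[1], results[0]  # (trueList, falseList)


# leave_Call uses exactly this split to separate positional from keyword args.
evens, odds = partition(lambda n: n % 2 == 0, [1, 2, 3, 4, 5])
assert (evens, odds) == ([2, 4], [1, 3, 5])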
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class biglakeCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'check_lock': ('name', ), + 'create_catalog': ('parent', 'catalog', 'catalog_id', ), + 'create_database': ('parent', 'database', 'database_id', ), + 'create_lock': ('parent', 'lock', ), + 'create_table': ('parent', 'table', 'table_id', ), + 'delete_catalog': ('name', ), + 'delete_database': ('name', ), + 'delete_lock': ('name', ), + 'delete_table': ('name', ), + 'get_catalog': ('name', ), + 'get_database': ('name', ), + 'get_table': ('name', ), + 'list_catalogs': ('parent', 'page_size', 'page_token', ), + 'list_databases': ('parent', 'page_size', 'page_token', ), + 'list_locks': ('parent', 'page_size', 'page_token', ), + 'list_tables': ('parent', 'page_size', 'page_token', 'view', ), + 'rename_table': ('name', 'new_name', ), + 'update_database': ('database', 'update_mask', ), + 'update_table': ('table', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=biglakeCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. 
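+        # libcst preserves all whitespace and comments, so code outside the rewritten calls round-trips unchanged.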
+ tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the biglake client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-biglake/setup.py b/packages/google-cloud-bigquery-biglake/setup.py new file mode 100644 index 000000000000..10ebf1310e43 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
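The net effect of leave_Call in both fixup scripts is to fold a flattened positional call into a single request dict while keeping retry/timeout/metadata as true keyword arguments. A trimmed, runnable sketch of the same transform, with the method table reduced to create_catalog (an illustration, not the shipped transformer):

import libcst as cst

SOURCE = 'client.create_catalog("projects/p/locations/us", catalog, "my_catalog")\n'


class DemoTransformer(cst.CSTTransformer):
    PARAMS = ("parent", "catalog", "catalog_id")  # from METHOD_TO_PARAMS above

    def leave_Call(self, original, updated):
        # Only rewrite attribute calls named create_catalog; leave everything else alone.
        if not (
            isinstance(original.func, cst.Attribute)
            and original.func.attr.value == "create_catalog"
        ):
            return updated
        request_arg = cst.Arg(
            value=cst.Dict(
                [
                    cst.DictElement(cst.SimpleString(repr(name)), arg.value)
                    for name, arg in zip(self.PARAMS, updated.args)
                ]
            ),
            keyword=cst.Name("request"),
        )
        return updated.with_changes(args=[request_arg])


print(cst.parse_module(SOURCE).visit(DemoTransformer()).code)
# prints, modulo whitespace:
# client.create_catalog(request={'parent': "projects/p/locations/us", 'catalog': catalog, 'catalog_id': "my_catalog"})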
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-bigquery-biglake" + + +description = "Google Cloud Bigquery Biglake API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/bigquery/biglake/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud", "google.cloud.bigquery"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-bigquery-biglake/testing/.gitignore b/packages/google-cloud-bigquery-biglake/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-bigquery-biglake/testing/constraints-3.10.txt b/packages/google-cloud-bigquery-biglake/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-biglake/testing/constraints-3.11.txt b/packages/google-cloud-bigquery-biglake/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
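+# (Deliberately unpinned; constraints-3.7.txt pins the exact lower bounds instead.)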
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-biglake/testing/constraints-3.12.txt b/packages/google-cloud-bigquery-biglake/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-biglake/testing/constraints-3.7.txt b/packages/google-cloud-bigquery-biglake/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-bigquery-biglake/testing/constraints-3.8.txt b/packages/google-cloud-bigquery-biglake/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-biglake/testing/constraints-3.9.txt b/packages/google-cloud-bigquery-biglake/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-biglake/tests/__init__.py b/packages/google-cloud-bigquery-biglake/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
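One detail of setup.py above worth noting: the version is obtained by exec()-ing gapic_version.py into a scratch dict, so the build never imports the not-yet-installed package, and the leading digit of the version selects the trove classifier. The trick in isolation, against a throwaway file:

import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "gapic_version.py")
    with open(path, "w") as fp:
        fp.write('__version__ = "0.3.0"\n')  # stand-in for the real module

    version = {}
    with open(path) as fp:
        exec(fp.read(), version)  # populates version["__version__"] without an import

    release_status = (
        "Development Status :: 4 - Beta"
        if version["__version__"][0] == "0"
        else "Development Status :: 5 - Production/Stable"
    )
    assert release_status == "Development Status :: 4 - Beta"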
+# diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/__init__.py b/packages/google-cloud-bigquery-biglake/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/__init__.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1/__init__.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1/test_metastore_service.py new file mode 100644 index 000000000000..2a0232dd658b --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1/test_metastore_service.py @@ -0,0 +1,10445 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.bigquery.biglake_v1.services.metastore_service import ( + MetastoreServiceAsyncClient, + MetastoreServiceClient, + pagers, + transports, +) +from google.cloud.bigquery.biglake_v1.types import metastore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
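+# (Substituting "foo.googleapis.com" means the derived mTLS endpoint,
+# "foo.mtls.googleapis.com", is distinguishable in assertions.)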
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetastoreServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MetastoreServiceClient, "grpc"), + (MetastoreServiceAsyncClient, "grpc_asyncio"), + (MetastoreServiceClient, "rest"), + ], +) +def test_metastore_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "biglake.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MetastoreServiceGrpcTransport, "grpc"), + (transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MetastoreServiceRestTransport, "rest"), + ], +) +def test_metastore_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MetastoreServiceClient, "grpc"), + (MetastoreServiceAsyncClient, "grpc_asyncio"), + (MetastoreServiceClient, "rest"), + ], +) +def test_metastore_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + 
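+        # from_service_account_json is a generated alias of from_service_account_file.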
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "biglake.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com" + ) + + +def test_metastore_service_client_get_transport_class(): + transport = MetastoreServiceClient.get_transport_class() + available_transports = [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceRestTransport, + ] + assert transport in available_transports + + transport = MetastoreServiceClient.get_transport_class("grpc") + assert transport == transports.MetastoreServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + MetastoreServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceClient), +) +@mock.patch.object( + MetastoreServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceAsyncClient), +) +def test_metastore_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MetastoreServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MetastoreServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MetastoreServiceClient, + transports.MetastoreServiceGrpcTransport, + "grpc", + "true", + ), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MetastoreServiceClient, + transports.MetastoreServiceGrpcTransport, + "grpc", + "false", + ), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + MetastoreServiceClient, + transports.MetastoreServiceRestTransport, + "rest", + "true", + ), + ( + MetastoreServiceClient, + transports.MetastoreServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + MetastoreServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceClient), +) +@mock.patch.object( + MetastoreServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_metastore_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
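+    # With neither an explicit nor an ADC certificate, the client stays on the
+    # regular endpoint for either value of GOOGLE_API_USE_CLIENT_CERTIFICATE.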
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [MetastoreServiceClient, MetastoreServiceAsyncClient] +) +@mock.patch.object( + MetastoreServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceClient), +) +@mock.patch.object( + MetastoreServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceAsyncClient), +) +def test_metastore_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
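+    # has_default_client_cert_source is mocked True below, so "auto" resolves to
+    # the mTLS endpoint and the default certificate callback.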
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), + ], +) +def test_metastore_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetastoreServiceClient, + transports.MetastoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MetastoreServiceClient, + transports.MetastoreServiceRestTransport, + "rest", + None, + ), + ], +) +def test_metastore_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
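+    # The filename is forwarded to the transport verbatim; nothing is read from
+    # disk in this mocked path.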
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_metastore_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery.biglake_v1.services.metastore_service.transports.MetastoreServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MetastoreServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetastoreServiceClient, + transports.MetastoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_metastore_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
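+    # Patch load_credentials_from_file so the file-based credentials can be
+    # asserted on create_channel below.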
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "biglake.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="biglake.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateCatalogRequest, + dict, + ], +) +def test_create_catalog(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog( + name="name_value", + ) + response = client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_create_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + client.create_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateCatalogRequest() + + +@pytest.mark.asyncio +async def test_create_catalog_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateCatalogRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Catalog( + name="name_value", + ) + ) + response = await client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. 
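+    # (the async variants only check that a call happened; the sync tests assert exactly one)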
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_create_catalog_async_from_dict(): + await test_create_catalog_async(request_type=dict) + + +def test_create_catalog_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateCatalogRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + call.return_value = metastore.Catalog() + client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_catalog_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateCatalogRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + await client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_catalog_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_catalog( + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
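+    # Each flattened field must have landed on the request object in args[0].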
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].catalog + mock_val = metastore.Catalog(name="name_value") + assert arg == mock_val + arg = args[0].catalog_id + mock_val = "catalog_id_value" + assert arg == mock_val + + +def test_create_catalog_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_catalog( + metastore.CreateCatalogRequest(), + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_catalog_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_catalog( + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].catalog + mock_val = metastore.Catalog(name="name_value") + assert arg == mock_val + arg = args[0].catalog_id + mock_val = "catalog_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_catalog_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_catalog( + metastore.CreateCatalogRequest(), + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteCatalogRequest, + dict, + ], +) +def test_delete_catalog(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog( + name="name_value", + ) + response = client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteCatalogRequest() + + # Establish that the response is the type that we expect. 
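+    # In this API, DeleteCatalog returns the deleted Catalog rather than Empty,
+    # hence the name check.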
+ assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_delete_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + client.delete_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteCatalogRequest() + + +@pytest.mark.asyncio +async def test_delete_catalog_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteCatalogRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Catalog( + name="name_value", + ) + ) + response = await client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_catalog_async_from_dict(): + await test_delete_catalog_async(request_type=dict) + + +def test_delete_catalog_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + call.return_value = metastore.Catalog() + client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_catalog_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + await client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_catalog_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_catalog_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_catalog( + metastore.DeleteCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_catalog_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_catalog_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_catalog( + metastore.DeleteCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetCatalogRequest, + dict, + ], +) +def test_get_catalog(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = metastore.Catalog( + name="name_value", + ) + response = client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_get_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + client.get_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetCatalogRequest() + + +@pytest.mark.asyncio +async def test_get_catalog_async( + transport: str = "grpc_asyncio", request_type=metastore.GetCatalogRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Catalog( + name="name_value", + ) + ) + response = await client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_catalog_async_from_dict(): + await test_get_catalog_async(request_type=dict) + + +def test_get_catalog_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + call.return_value = metastore.Catalog() + client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_catalog_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = metastore.GetCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + await client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_catalog_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_catalog_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_catalog( + metastore.GetCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_catalog_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_catalog_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_catalog( + metastore.GetCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListCatalogsRequest, + dict, + ], +) +def test_list_catalogs(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListCatalogsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListCatalogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCatalogsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_catalogs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + client.list_catalogs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListCatalogsRequest() + + +@pytest.mark.asyncio +async def test_list_catalogs_async( + transport: str = "grpc_asyncio", request_type=metastore.ListCatalogsRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListCatalogsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListCatalogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCatalogsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_catalogs_async_from_dict(): + await test_list_catalogs_async(request_type=dict) + + +def test_list_catalogs_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListCatalogsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + call.return_value = metastore.ListCatalogsResponse() + client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_catalogs_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListCatalogsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListCatalogsResponse() + ) + await client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_catalogs_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListCatalogsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_catalogs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_catalogs_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_catalogs( + metastore.ListCatalogsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_catalogs_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListCatalogsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListCatalogsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_catalogs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_catalogs_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ await client.list_catalogs(
+ metastore.ListCatalogsRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_catalogs_pager(transport_name: str = "grpc"):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ next_page_token="abc",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[],
+ next_page_token="def",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ ],
+ next_page_token="ghi",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_catalogs(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, metastore.Catalog) for i in results)
+
+
+def test_list_catalogs_pages(transport_name: str = "grpc"):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ next_page_token="abc",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[],
+ next_page_token="def",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ ],
+ next_page_token="ghi",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_catalogs(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_catalogs_async_pager():
+ client = MetastoreServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalogs), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
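+ # (Four pages of 3, 0, 1 and 2 items follow; the trailing RuntimeError
+ # would surface if the pager ever requested a page past the last one.)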
+ call.side_effect = (
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ next_page_token="abc",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[],
+ next_page_token="def",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ ],
+ next_page_token="ghi",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_catalogs(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, metastore.Catalog) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_catalogs_async_pages():
+ client = MetastoreServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalogs), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ next_page_token="abc",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[],
+ next_page_token="def",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ ],
+ next_page_token="ghi",
+ ),
+ metastore.ListCatalogsResponse(
+ catalogs=[
+ metastore.Catalog(),
+ metastore.Catalog(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (
+ await client.list_catalogs(request={})
+ ).pages: # pragma: no branch
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ metastore.CreateDatabaseRequest,
+ dict,
+ ],
+)
+def test_create_database(request_type, transport: str = "grpc"):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_database), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metastore.Database(
+ name="name_value",
+ type_=metastore.Database.Type.HIVE,
+ hive_options=metastore.HiveDatabaseOptions(
+ location_uri="location_uri_value"
+ ),
+ )
+ response = client.create_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == metastore.CreateDatabaseRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, metastore.Database)
+ assert response.name == "name_value"
+ assert response.type_ == metastore.Database.Type.HIVE
+
+
+def test_create_database_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
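+ # (With no arguments the client constructs an empty CreateDatabaseRequest
+ # itself, which is exactly what the final assertion verifies.)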
+ client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + client.create_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateDatabaseRequest() + + +@pytest.mark.asyncio +async def test_create_database_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateDatabaseRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + + +def test_create_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = metastore.Database() + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_database_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ) + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +def test_create_database_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + metastore.CreateDatabaseRequest(), + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_database_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_database( + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ) + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_database_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
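+ # (The async surface applies the same mutually-exclusive-arguments check
+ # as the sync client, so the identical ValueError is expected.)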
+ with pytest.raises(ValueError): + await client.create_database( + metastore.CreateDatabaseRequest(), + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteDatabaseRequest, + dict, + ], +) +def test_delete_database(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + response = client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_delete_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + client.delete_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteDatabaseRequest() + + +@pytest.mark.asyncio +async def test_delete_database_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteDatabaseRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteDatabaseRequest() + + # Establish that the response is the type that we expect. 
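+ # (Awaiting the FakeUnaryUnaryCall above resolves to the wrapped Database
+ # message, so the sync test's response assertions apply unchanged.)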
+ assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_delete_database_async_from_dict(): + await test_delete_database_async(request_type=dict) + + +def test_delete_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteDatabaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + call.return_value = metastore.Database() + client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_database_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteDatabaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + await client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_database_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_database( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_database_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_database( + metastore.DeleteDatabaseRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_database_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
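+ # (Patching __call__ on type(...) intercepts the underlying gRPC
+ # multicallable, so no real channel traffic occurs in these tests.)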
+ with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_database( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_database_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_database( + metastore.DeleteDatabaseRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.UpdateDatabaseRequest, + dict, + ], +) +def test_update_database(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + response = client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_update_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + client.update_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateDatabaseRequest() + + +@pytest.mark.asyncio +async def test_update_database_async( + transport: str = "grpc_asyncio", request_type=metastore.UpdateDatabaseRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_update_database_async_from_dict(): + await test_update_database_async(request_type=dict) + + +def test_update_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateDatabaseRequest() + + request.database.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + call.return_value = metastore.Database() + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "database.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_database_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateDatabaseRequest() + + request.database.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "database.name=name_value", + ) in kw["metadata"] + + +def test_update_database_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
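+ # (update_mask is a google.protobuf FieldMask; its "paths" entries name
+ # the Database fields the server should overwrite.)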
+ client.update_database( + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_database_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database( + metastore.UpdateDatabaseRequest(), + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_database_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_database( + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_database_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_database( + metastore.UpdateDatabaseRequest(), + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetDatabaseRequest, + dict, + ], +) +def test_get_database(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + response = client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_get_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + client.get_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetDatabaseRequest() + + +@pytest.mark.asyncio +async def test_get_database_async( + transport: str = "grpc_asyncio", request_type=metastore.GetDatabaseRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_get_database_async_from_dict(): + await test_get_database_async(request_type=dict) + + +def test_get_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetDatabaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + call.return_value = metastore.Database() + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_database_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetDatabaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_database_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_database( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_database_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_database( + metastore.GetDatabaseRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_database_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_database( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_database_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_database( + metastore.GetDatabaseRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListDatabasesRequest, + dict, + ], +) +def test_list_databases(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_databases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + client.list_databases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListDatabasesRequest() + + +@pytest.mark.asyncio +async def test_list_databases_async( + transport: str = "grpc_asyncio", request_type=metastore.ListDatabasesRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + + +def test_list_databases_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
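+ # (List methods route on the parent collection rather than a resource
+ # name, hence the expected "parent=parent_value" header below.)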
+ request = metastore.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = metastore.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListDatabasesResponse() + ) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_databases_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_databases_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + metastore.ListDatabasesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_databases_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListDatabasesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListDatabasesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
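+ # (The flattened parent kwarg is folded into a ListDatabasesRequest by
+ # the client before the stub is invoked.)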
+ response = await client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_databases_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_databases( + metastore.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_pager(transport_name: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + metastore.Database(), + ], + next_page_token="abc", + ), + metastore.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + ], + next_page_token="ghi", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_databases(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Database) for i in results) + + +def test_list_databases_pages(transport_name: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + metastore.Database(), + ], + next_page_token="abc", + ), + metastore.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + ], + next_page_token="ghi", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + ], + ), + RuntimeError, + ) + pages = list(client.list_databases(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_databases_async_pager(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
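+        # side_effect yields one canned response per stub invocation; the
+        # pager keeps fetching while next_page_token is non-empty, and the
+        # trailing RuntimeError is a sentinel that would fail the test if the
+        # pager fetched past the final page.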
+ call.side_effect = ( + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + metastore.Database(), + ], + next_page_token="abc", + ), + metastore.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + ], + next_page_token="ghi", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_databases( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metastore.Database) for i in responses) + + +@pytest.mark.asyncio +async def test_list_databases_async_pages(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + metastore.Database(), + ], + next_page_token="abc", + ), + metastore.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + ], + next_page_token="ghi", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_databases(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateTableRequest, + dict, + ], +) +def test_create_table(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_create_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
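+    # With neither a request nor flattened fields, the client falls back to
+    # a default-constructed CreateTableRequest(), which is what the final
+    # assertion checks.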
+ client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + client.create_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateTableRequest() + + +@pytest.mark.asyncio +async def test_create_table_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_create_table_async_from_dict(): + await test_create_table_async(request_type=dict) + + +def test_create_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + call.return_value = metastore.Table() + client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_table( + parent="parent_value", + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + table_id="table_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].table + mock_val = metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ) + assert arg == mock_val + arg = args[0].table_id + mock_val = "table_id_value" + assert arg == mock_val + + +def test_create_table_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_table( + metastore.CreateTableRequest(), + parent="parent_value", + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + table_id="table_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_table_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_table( + parent="parent_value", + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + table_id="table_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].table + mock_val = metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ) + assert arg == mock_val + arg = args[0].table_id + mock_val = "table_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_table_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_table( + metastore.CreateTableRequest(), + parent="parent_value", + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + table_id="table_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteTableRequest, + dict, + ], +) +def test_delete_table(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_delete_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + client.delete_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteTableRequest() + + +@pytest.mark.asyncio +async def test_delete_table_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteTableRequest() + + # Establish that the response is the type that we expect. 
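+    # (The awaited response is the Table message wrapped by
+    # FakeUnaryUnaryCall above, which stands in for the awaitable call object
+    # a real grpc.aio channel would return.)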
+ assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_delete_table_async_from_dict(): + await test_delete_table_async(request_type=dict) + + +def test_delete_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + call.return_value = metastore.Table() + client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_table_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_table( + metastore.DeleteTableRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_table_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
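+    # Patching __call__ on the class of the transport's callable intercepts
+    # the gRPC invocation itself, so no channel or network I/O is exercised.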
+ with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_table_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_table( + metastore.DeleteTableRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.UpdateTableRequest, + dict, + ], +) +def test_update_table(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_update_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + client.update_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateTableRequest() + + +@pytest.mark.asyncio +async def test_update_table_async( + transport: str = "grpc_asyncio", request_type=metastore.UpdateTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_update_table_async_from_dict(): + await test_update_table_async(request_type=dict) + + +def test_update_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateTableRequest() + + request.table.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + call.return_value = metastore.Table() + client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "table.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateTableRequest() + + request.table.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "table.name=name_value", + ) in kw["metadata"] + + +def test_update_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_table( + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
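+    # update_mask is a protobuf FieldMask; its paths name the Table fields
+    # the server should overwrite, and the assertions below confirm it passes
+    # through the flattened call unchanged.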
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].table + mock_val = metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_table_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_table( + metastore.UpdateTableRequest(), + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_table_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_table( + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].table + mock_val = metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_table_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_table( + metastore.UpdateTableRequest(), + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.RenameTableRequest, + dict, + ], +) +def test_rename_table(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.rename_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RenameTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_rename_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + client.rename_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RenameTableRequest() + + +@pytest.mark.asyncio +async def test_rename_table_async( + transport: str = "grpc_asyncio", request_type=metastore.RenameTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.rename_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RenameTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_rename_table_async_from_dict(): + await test_rename_table_async(request_type=dict) + + +def test_rename_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.RenameTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + call.return_value = metastore.Table() + client.rename_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rename_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.RenameTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.rename_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_rename_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_table( + name="name_value", + new_name="new_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_name + mock_val = "new_name_value" + assert arg == mock_val + + +def test_rename_table_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_table( + metastore.RenameTableRequest(), + name="name_value", + new_name="new_name_value", + ) + + +@pytest.mark.asyncio +async def test_rename_table_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_table( + name="name_value", + new_name="new_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_name + mock_val = "new_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_rename_table_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rename_table( + metastore.RenameTableRequest(), + name="name_value", + new_name="new_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetTableRequest, + dict, + ], +) +def test_get_table(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_get_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + client.get_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetTableRequest() + + +@pytest.mark.asyncio +async def test_get_table_async( + transport: str = "grpc_asyncio", request_type=metastore.GetTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.get_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_table_async_from_dict(): + await test_get_table_async(request_type=dict) + + +def test_get_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value = metastore.Table() + client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_table_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_table( + metastore.GetTableRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_table_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_table_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_table( + metastore.GetTableRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListTablesRequest, + dict, + ], +) +def test_list_tables(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListTablesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListTablesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTablesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tables_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + client.list_tables() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListTablesRequest() + + +@pytest.mark.asyncio +async def test_list_tables_async( + transport: str = "grpc_asyncio", request_type=metastore.ListTablesRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
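+    # In proto3, unset fields simply take their default values, so an empty
+    # ListTablesRequest is still a valid message for this mocked round trip.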
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListTablesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListTablesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTablesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tables_async_from_dict(): + await test_list_tables_async(request_type=dict) + + +def test_list_tables_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListTablesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + call.return_value = metastore.ListTablesResponse() + client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tables_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListTablesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListTablesResponse() + ) + await client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_tables_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListTablesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tables( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_tables_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tables( + metastore.ListTablesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tables_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListTablesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListTablesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tables( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_tables_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tables( + metastore.ListTablesRequest(), + parent="parent_value", + ) + + +def test_list_tables_pager(transport_name: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + metastore.Table(), + ], + next_page_token="abc", + ), + metastore.ListTablesResponse( + tables=[], + next_page_token="def", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + ], + next_page_token="ghi", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tables(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Table) for i in results) + + +def test_list_tables_pages(transport_name: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Set the response to a series of pages. 
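+        # Unlike list(pager), which flattens items across pages, the .pages
+        # iterator used below yields each raw ListTablesResponse so the
+        # per-page tokens can be inspected.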
+ call.side_effect = ( + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + metastore.Table(), + ], + next_page_token="abc", + ), + metastore.ListTablesResponse( + tables=[], + next_page_token="def", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + ], + next_page_token="ghi", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tables(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tables_async_pager(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tables), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + metastore.Table(), + ], + next_page_token="abc", + ), + metastore.ListTablesResponse( + tables=[], + next_page_token="def", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + ], + next_page_token="ghi", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tables( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metastore.Table) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tables_async_pages(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tables), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + metastore.Table(), + ], + next_page_token="abc", + ), + metastore.ListTablesResponse( + tables=[], + next_page_token="def", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + ], + next_page_token="ghi", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_tables(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateCatalogRequest, + dict, + ], +) +def test_create_catalog_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["catalog"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
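+    # The REST transport goes through an HTTP session rather than a gRPC
+    # stub, so the test patches the session's request method and hands back a
+    # canned 200 whose body is the JSON encoding of the expected proto.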
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_catalog(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_create_catalog_rest_required_fields( + request_type=metastore.CreateCatalogRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["catalog_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "catalogId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "catalogId" in jsonified_request + assert jsonified_request["catalogId"] == request_init["catalog_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["catalogId"] = "catalog_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_catalog._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("catalog_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "catalogId" in jsonified_request + assert jsonified_request["catalogId"] == "catalog_id_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
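+            # This is also why expected_params below lists catalogId as an
+            # empty string: the transport re-adds required fields that the
+            # default-dropping serialization removed.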
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_catalog(request) + + expected_params = [ + ( + "catalogId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_catalog_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_catalog._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("catalogId",)) + & set( + ( + "parent", + "catalog", + "catalogId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_catalog_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_catalog" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_create_catalog" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.CreateCatalogRequest.pb(metastore.CreateCatalogRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Catalog.to_json(metastore.Catalog()) + + request = metastore.CreateCatalogRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Catalog() + + client.create_catalog( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_catalog_rest_bad_request( + transport: str = "rest", request_type=metastore.CreateCatalogRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["catalog"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
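+    # A mocked 400 status is enough on its own to raise
+    # core_exceptions.BadRequest; no response body is needed.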
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_catalog(request) + + +def test_create_catalog_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_catalog(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/catalogs" % client.transport._host, + args[1], + ) + + +def test_create_catalog_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_catalog( + metastore.CreateCatalogRequest(), + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + +def test_create_catalog_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteCatalogRequest, + dict, + ], +) +def test_delete_catalog_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_catalog(request) + + # Establish that the response is the type that we expect. 
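+    # Note that DeleteCatalog returns the deleted Catalog resource itself
+    # rather than an Empty message.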
+ assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_delete_catalog_rest_required_fields( + request_type=metastore.DeleteCatalogRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
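+            # DELETE maps to a bodiless HTTP request, so the transcode result
+            # below deliberately omits a "body" entry.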
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_catalog(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_catalog_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_catalog._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_catalog_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_catalog" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_delete_catalog" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.DeleteCatalogRequest.pb(metastore.DeleteCatalogRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Catalog.to_json(metastore.Catalog()) + + request = metastore.DeleteCatalogRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Catalog() + + client.delete_catalog( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_catalog_rest_bad_request( + transport: str = "rest", request_type=metastore.DeleteCatalogRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
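+    # pytest.raises and mock.patch are combined in one with-statement so the
+    # patch is still active at the moment the client raises.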
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_catalog(request) + + +def test_delete_catalog_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_catalog(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/catalogs/*}" % client.transport._host, + args[1], + ) + + +def test_delete_catalog_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_catalog( + metastore.DeleteCatalogRequest(), + name="name_value", + ) + + +def test_delete_catalog_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetCatalogRequest, + dict, + ], +) +def test_get_catalog_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_catalog(request) + + # Establish that the response is the type that we expect. 
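+    # The JSON bytes assigned to _content above were decoded back into a
+    # Catalog message by the REST transport.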
+ assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_get_catalog_rest_required_fields(request_type=metastore.GetCatalogRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_catalog(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_catalog_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_catalog._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_catalog_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_catalog" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_get_catalog" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.GetCatalogRequest.pb(metastore.GetCatalogRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Catalog.to_json(metastore.Catalog()) + + request = metastore.GetCatalogRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Catalog() + + client.get_catalog( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_catalog_rest_bad_request( + transport: str = "rest", request_type=metastore.GetCatalogRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_catalog(request) + + +def test_get_catalog_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
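+    # The flattened "name" kwarg must expand into a URL that matches the
+    # catalog path template validated at the end of this test.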
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_catalog(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/catalogs/*}" % client.transport._host, + args[1], + ) + + +def test_get_catalog_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_catalog( + metastore.GetCatalogRequest(), + name="name_value", + ) + + +def test_get_catalog_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListCatalogsRequest, + dict, + ], +) +def test_list_catalogs_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListCatalogsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListCatalogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_catalogs(request) + + # Establish that the response is the type that we expect. 
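+    # Even over REST, the list response is wrapped in a ListCatalogsPager
+    # rather than returned as a raw ListCatalogsResponse.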
+ assert isinstance(response, pagers.ListCatalogsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_catalogs_rest_required_fields(request_type=metastore.ListCatalogsRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_catalogs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_catalogs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.ListCatalogsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.ListCatalogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_catalogs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_catalogs_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_catalogs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_catalogs_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_catalogs" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_list_catalogs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.ListCatalogsRequest.pb(metastore.ListCatalogsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.ListCatalogsResponse.to_json( + metastore.ListCatalogsResponse() + ) + + request = metastore.ListCatalogsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.ListCatalogsResponse() + + client.list_catalogs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_catalogs_rest_bad_request( + transport: str = "rest", request_type=metastore.ListCatalogsRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_catalogs(request) + + +def test_list_catalogs_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListCatalogsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListCatalogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_catalogs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/catalogs" % client.transport._host, + args[1], + ) + + +def test_list_catalogs_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_catalogs( + metastore.ListCatalogsRequest(), + parent="parent_value", + ) + + +def test_list_catalogs_rest_pager(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metastore.ListCatalogsResponse( + catalogs=[ + metastore.Catalog(), + metastore.Catalog(), + metastore.Catalog(), + ], + next_page_token="abc", + ), + metastore.ListCatalogsResponse( + catalogs=[], + next_page_token="def", + ), + metastore.ListCatalogsResponse( + catalogs=[ + metastore.Catalog(), + ], + next_page_token="ghi", + ), + metastore.ListCatalogsResponse( + catalogs=[ + metastore.Catalog(), + metastore.Catalog(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metastore.ListCatalogsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_catalogs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Catalog) for i in results) + + pages = list(client.list_catalogs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateDatabaseRequest, + dict, + ], +) +def test_create_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_database(request) + + # Establish that the response is the type that we expect. 
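+    # Both the resource name and the Type enum must survive the JSON round
+    # trip intact.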
+ assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_create_database_rest_required_fields( + request_type=metastore.CreateDatabaseRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "databaseId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == request_init["database_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["databaseId"] = "database_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("database_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == "database_id_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_database(request) + + expected_params = [ + ( + "databaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("databaseId",)) + & set( + ( + "parent", + "database", + "databaseId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_create_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.CreateDatabaseRequest.pb( + metastore.CreateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.CreateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.create_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_database_rest_bad_request( + transport: str = "rest", request_type=metastore.CreateDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_database(request) + + +def test_create_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/catalogs/*}/databases" + % client.transport._host, + args[1], + ) + + +def test_create_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + metastore.CreateDatabaseRequest(), + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + +def test_create_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteDatabaseRequest, + dict, + ], +) +def test_delete_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_delete_database_rest_required_fields( + request_type=metastore.DeleteDatabaseRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_delete_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.DeleteDatabaseRequest.pb( + metastore.DeleteDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.DeleteDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.delete_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_database_rest_bad_request( + transport: str = "rest", request_type=metastore.DeleteDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_database(request) + + +def test_delete_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_database( + metastore.DeleteDatabaseRequest(), + name="name_value", + ) + + +def test_delete_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.UpdateDatabaseRequest, + dict, + ], +) +def test_update_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "database": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + } + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_update_database_rest_required_fields( + request_type=metastore.UpdateDatabaseRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
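+            # For PATCH, update_mask is the only query-string field; the
+            # Database payload itself travels in the request body.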
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("database",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_update_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_update_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.UpdateDatabaseRequest.pb( + metastore.UpdateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.UpdateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.update_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_database_rest_bad_request( + transport: str = "rest", request_type=metastore.UpdateDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "database": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + } + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
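+    # A 400 status should surface as core_exceptions.BadRequest via the
+    # status-code-to-exception mapping in google.api_core.exceptions.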
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_database(request) + + +def test_update_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "database": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{database.name=projects/*/locations/*/catalogs/*/databases/*}" + % client.transport._host, + args[1], + ) + + +def test_update_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database( + metastore.UpdateDatabaseRequest(), + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetDatabaseRequest, + dict, + ], +) +def test_get_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_get_database_rest_required_fields(request_type=metastore.GetDatabaseRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
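+            # GET methods carry no request body, so the stubbed transcode
+            # result below populates only the URI and query_params.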
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_get_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.GetDatabaseRequest.pb(metastore.GetDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.GetDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.get_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_database_rest_bad_request( + transport: str = "rest", request_type=metastore.GetDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_database(request) + + +def test_get_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*}" + % client.transport._host, + args[1], + ) + + +def test_get_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_database( + metastore.GetDatabaseRequest(), + name="name_value", + ) + + +def test_get_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListDatabasesRequest, + dict, + ], +) +def test_list_databases_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_databases(request) + + # Establish that the response is the type that we expect. 
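+        # The client wraps the raw ListDatabasesResponse in a pager that can
+        # lazily fetch subsequent pages; only the first page is faked here.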
+ assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_databases_rest_required_fields( + request_type=metastore.ListDatabasesRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_databases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_databases_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_databases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_databases_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_databases" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_list_databases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.ListDatabasesRequest.pb(metastore.ListDatabasesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.ListDatabasesResponse.to_json( + metastore.ListDatabasesResponse() + ) + + request = metastore.ListDatabasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.ListDatabasesResponse() + + client.list_databases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_databases_rest_bad_request( + transport: str = "rest", request_type=metastore.ListDatabasesRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_databases(request) + + +def test_list_databases_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/catalogs/*}/databases" + % client.transport._host, + args[1], + ) + + +def test_list_databases_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + metastore.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_rest_pager(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + metastore.Database(), + ], + next_page_token="abc", + ), + metastore.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + ], + next_page_token="ghi", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metastore.ListDatabasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3" + } + + pager = client.list_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Database) for i in results) + + pages = list(client.list_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateTableRequest, + dict, + ], +) +def test_create_table_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request_init["table"] = { + "hive_options": { + "parameters": {}, + "table_type": "table_type_value", + "storage_descriptor": { + "location_uri": "location_uri_value", + "input_format": "input_format_value", + "output_format": "output_format_value", + "serde_info": {"serialization_lib": "serialization_lib_value"}, + }, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_table(request) + + # Establish that the response is the type that we expect. 
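+        # The JSON payload round-trips back into a Table proto, so the scalar
+        # fields set on return_value above should survive intact.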
+ assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_create_table_rest_required_fields(request_type=metastore.CreateTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["table_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "tableId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "tableId" in jsonified_request + assert jsonified_request["tableId"] == request_init["table_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["tableId"] = "table_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("table_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "tableId" in jsonified_request + assert jsonified_request["tableId"] == "table_id_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = metastore.Table.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_table(request)
+
+            expected_params = [
+                (
+                    "tableId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_table_rest_unset_required_fields():
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_table._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("tableId",))
+        & set(
+            (
+                "parent",
+                "table",
+                "tableId",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_table_rest_interceptors(null_interceptor):
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MetastoreServiceRestInterceptor(),
+    )
+    client = MetastoreServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "post_create_table"
+    ) as post, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "pre_create_table"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metastore.CreateTableRequest.pb(metastore.CreateTableRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metastore.Table.to_json(metastore.Table())
+
+        request = metastore.CreateTableRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metastore.Table()
+
+        client.create_table(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_table_rest_bad_request(
+    transport: str = "rest", request_type=metastore.CreateTableRequest
+):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4"
+    }
+    request_init["table"] = {
+        "hive_options": {
+            "parameters": {},
+            "table_type": "table_type_value",
+            "storage_descriptor": {
+                "location_uri": "location_uri_value",
+                "input_format": "input_format_value",
+                "output_format": "output_format_value",
+                "serde_info": {"serialization_lib": "serialization_lib_value"},
+            },
+        },
+        "name": "name_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "delete_time": {},
+        "expire_time": {},
+        "type_": 1,
+        "etag": "etag_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_table(request)
+
+
+def test_create_table_rest_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = metastore.Table()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            table_id="table_id_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = metastore.Table.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_table(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_table_rest_flattened_error(transport: str = "rest"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_table(
+            metastore.CreateTableRequest(),
+            parent="parent_value",
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            table_id="table_id_value",
+        )
+
+
+def test_create_table_rest_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.DeleteTableRequest,
+        dict,
+    ],
+)
+def test_delete_table_rest(request_type):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
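+        # In this API, DeleteTable returns the deleted Table resource rather
+        # than Empty, so the faked response is a fully formed Table proto.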
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_delete_table_rest_required_fields(request_type=metastore.DeleteTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_table_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_table_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_table" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_delete_table" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.DeleteTableRequest.pb(metastore.DeleteTableRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Table.to_json(metastore.Table()) + + request = metastore.DeleteTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Table() + + client.delete_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_table_rest_bad_request( + transport: str = "rest", request_type=metastore.DeleteTableRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_table(request) + + +def test_delete_table_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_table_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_table( + metastore.DeleteTableRequest(), + name="name_value", + ) + + +def test_delete_table_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.UpdateTableRequest, + dict, + ], +) +def test_update_table_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "table": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + } + request_init["table"] = { + "hive_options": { + "parameters": {}, + "table_type": "table_type_value", + "storage_descriptor": { + "location_uri": "location_uri_value", + "input_format": "input_format_value", + "output_format": "output_format_value", + "serde_info": {"serialization_lib": "serialization_lib_value"}, + }, + }, + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_update_table_rest_required_fields(request_type=metastore.UpdateTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
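+            # update_table uses PATCH with the Table proto as the request
+            # body, which is why a "body" entry is added to the stub below.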
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = metastore.Table.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_table(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_table_rest_unset_required_fields():
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_table._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask",)) & set(("table",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_table_rest_interceptors(null_interceptor):
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MetastoreServiceRestInterceptor(),
+    )
+    client = MetastoreServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "post_update_table"
+    ) as post, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "pre_update_table"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metastore.UpdateTableRequest.pb(metastore.UpdateTableRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metastore.Table.to_json(metastore.Table())
+
+        request = metastore.UpdateTableRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metastore.Table()
+
+        client.update_table(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_table_rest_bad_request(
+    transport: str = "rest", request_type=metastore.UpdateTableRequest
+):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "table": {
+            "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+        }
+    }
+    request_init["table"] = {
+        "hive_options": {
+            "parameters": {},
+            "table_type": "table_type_value",
+            "storage_descriptor": {
+                "location_uri": "location_uri_value",
+                "input_format": "input_format_value",
+                "output_format": "output_format_value",
+                "serde_info": {"serialization_lib": "serialization_lib_value"},
+            },
+        },
+        "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "delete_time": {},
+        "expire_time": {},
+        "type_": 1,
+        "etag": "etag_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_table(request)
+
+
+def test_update_table_rest_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = metastore.Table()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "table": {
+                "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+            }
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = metastore.Table.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_table(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_table_rest_flattened_error(transport: str = "rest"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_table(
+            metastore.UpdateTableRequest(),
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+def test_update_table_rest_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.RenameTableRequest,
+        dict,
+    ],
+)
+def test_rename_table_rest(request_type):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
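+        # RenameTable responds with the Table under its new name; a minimal
+        # proto with representative field values stands in for it here.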
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rename_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_rename_table_rest_required_fields(request_type=metastore.RenameTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["new_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["newName"] = "new_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "newName" in jsonified_request + assert jsonified_request["newName"] == "new_name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = metastore.Table.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.rename_table(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_rename_table_rest_unset_required_fields():
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.rename_table._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "name",
+                "newName",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_rename_table_rest_interceptors(null_interceptor):
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MetastoreServiceRestInterceptor(),
+    )
+    client = MetastoreServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "post_rename_table"
+    ) as post, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "pre_rename_table"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metastore.RenameTableRequest.pb(metastore.RenameTableRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metastore.Table.to_json(metastore.Table())
+
+        request = metastore.RenameTableRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metastore.Table()
+
+        client.rename_table(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_rename_table_rest_bad_request(
+    transport: str = "rest", request_type=metastore.RenameTableRequest
+):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
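+    # (google-api-core maps an HTTP 400 response onto
+    # core_exceptions.BadRequest, which is what pytest.raises checks for here.)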
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rename_table(request) + + +def test_rename_table_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + new_name="new_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rename_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename" + % client.transport._host, + args[1], + ) + + +def test_rename_table_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_table( + metastore.RenameTableRequest(), + name="name_value", + new_name="new_name_value", + ) + + +def test_rename_table_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetTableRequest, + dict, + ], +) +def test_get_table_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_get_table_rest_required_fields(request_type=metastore.GetTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = metastore.Table.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_table(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_table_rest_unset_required_fields():
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_table._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_table_rest_interceptors(null_interceptor):
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MetastoreServiceRestInterceptor(),
+    )
+    client = MetastoreServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "post_get_table"
+    ) as post, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "pre_get_table"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metastore.GetTableRequest.pb(metastore.GetTableRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metastore.Table.to_json(metastore.Table())
+
+        request = metastore.GetTableRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metastore.Table()
+
+        client.get_table(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_table_rest_bad_request(
+    transport: str = "rest", request_type=metastore.GetTableRequest
+):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_table(request)
+
+
+def test_get_table_rest_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
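+    # (The flattened kwargs below are folded into a GetTableRequest before
+    # transcoding; mixing a request object with flattened fields raises
+    # ValueError, as test_get_table_rest_flattened_error verifies further down.)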
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" + % client.transport._host, + args[1], + ) + + +def test_get_table_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_table( + metastore.GetTableRequest(), + name="name_value", + ) + + +def test_get_table_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListTablesRequest, + dict, + ], +) +def test_list_tables_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListTablesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tables(request) + + # Establish that the response is the type that we expect. 
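+    # (list_tables returns a ListTablesPager wrapping the raw
+    # ListTablesResponse; lazy multi-page iteration is exercised separately in
+    # test_list_tables_rest_pager below.)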
+ assert isinstance(response, pagers.ListTablesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tables_rest_required_fields(request_type=metastore.ListTablesRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tables._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tables._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.ListTablesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = metastore.ListTablesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_tables(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_tables_rest_unset_required_fields():
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_tables._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+                "view",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_tables_rest_interceptors(null_interceptor):
+    transport = transports.MetastoreServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MetastoreServiceRestInterceptor(),
+    )
+    client = MetastoreServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "post_list_tables"
+    ) as post, mock.patch.object(
+        transports.MetastoreServiceRestInterceptor, "pre_list_tables"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metastore.ListTablesRequest.pb(metastore.ListTablesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metastore.ListTablesResponse.to_json(
+            metastore.ListTablesResponse()
+        )
+
+        request = metastore.ListTablesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metastore.ListTablesResponse()
+
+        client.list_tables(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_tables_rest_bad_request(
+    transport: str = "rest", request_type=metastore.ListTablesRequest
+):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tables(request) + + +def test_list_tables_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListTablesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tables(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" + % client.transport._host, + args[1], + ) + + +def test_list_tables_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tables( + metastore.ListTablesRequest(), + parent="parent_value", + ) + + +def test_list_tables_rest_pager(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + metastore.Table(), + ], + next_page_token="abc", + ), + metastore.ListTablesResponse( + tables=[], + next_page_token="def", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + ], + next_page_token="ghi", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metastore.ListTablesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + pager = client.list_tables(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Table) for i in results) + + pages = list(client.list_tables(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetastoreServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetastoreServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + transports.MetastoreServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MetastoreServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetastoreServiceGrpcTransport, + ) + + +def test_metastore_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetastoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_metastore_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery.biglake_v1.services.metastore_service.transports.MetastoreServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MetastoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
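+    # (On the abstract base transport every RPC is exposed only as a stub;
+    # the concrete gRPC and REST transports are the ones that override these
+    # with real calls.)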
+ methods = ( + "create_catalog", + "delete_catalog", + "get_catalog", + "list_catalogs", + "create_database", + "delete_database", + "update_database", + "get_database", + "list_databases", + "create_table", + "delete_table", + "update_table", + "rename_table", + "get_table", + "list_tables", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_metastore_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery.biglake_v1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetastoreServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_metastore_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery.biglake_v1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetastoreServiceTransport() + adc.assert_called_once() + + +def test_metastore_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetastoreServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
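+    # (google.auth.default() is patched so the test never picks up real
+    # Application Default Credentials from the host environment.)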
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + transports.MetastoreServiceRestTransport, + ], +) +def test_metastore_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetastoreServiceGrpcTransport, grpc_helpers), + (transports.MetastoreServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_metastore_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "biglake.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=["1", "2"], + default_host="biglake.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
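+    # (With only client_cert_source_for_mtls supplied, the transport should
+    # invoke the callback for a (cert, key) pair and build gRPC SSL channel
+    # credentials from it, as asserted below.)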
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_metastore_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MetastoreServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_metastore_service_host_no_port(transport_name): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="biglake.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "biglake.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_metastore_service_host_with_port(transport_name): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="biglake.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "biglake.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_metastore_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MetastoreServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MetastoreServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_catalog._session + session2 = client2.transport.create_catalog._session + assert session1 != session2 + session1 = client1.transport.delete_catalog._session + session2 = client2.transport.delete_catalog._session + assert session1 != session2 + session1 = client1.transport.get_catalog._session + session2 = client2.transport.get_catalog._session + assert session1 != session2 + session1 = client1.transport.list_catalogs._session + session2 = client2.transport.list_catalogs._session + assert session1 != session2 + session1 = client1.transport.create_database._session + session2 = client2.transport.create_database._session + assert session1 != session2 + session1 = client1.transport.delete_database._session + session2 = client2.transport.delete_database._session + assert session1 != session2 + session1 = client1.transport.update_database._session + session2 = client2.transport.update_database._session + assert session1 != session2 + session1 = client1.transport.get_database._session + session2 = client2.transport.get_database._session + assert session1 != session2 + session1 = 
client1.transport.list_databases._session
+    session2 = client2.transport.list_databases._session
+    assert session1 != session2
+    session1 = client1.transport.create_table._session
+    session2 = client2.transport.create_table._session
+    assert session1 != session2
+    session1 = client1.transport.delete_table._session
+    session2 = client2.transport.delete_table._session
+    assert session1 != session2
+    session1 = client1.transport.update_table._session
+    session2 = client2.transport.update_table._session
+    assert session1 != session2
+    session1 = client1.transport.rename_table._session
+    session2 = client2.transport.rename_table._session
+    assert session1 != session2
+    session1 = client1.transport.get_table._session
+    session2 = client2.transport.get_table._session
+    assert session1 != session2
+    session1 = client1.transport.list_tables._session
+    session2 = client2.transport.list_tables._session
+    assert session1 != session2
+
+
+def test_metastore_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MetastoreServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_metastore_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MetastoreServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_catalog_path(): + project = "squid" + location = "clam" + catalog = "whelk" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}".format( + project=project, + location=location, + catalog=catalog, + ) + actual = MetastoreServiceClient.catalog_path(project, location, catalog) + assert expected == actual + + +def test_parse_catalog_path(): + expected = { + "project": "octopus", + "location": "oyster", + "catalog": "nudibranch", + } + path = MetastoreServiceClient.catalog_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetastoreServiceClient.parse_catalog_path(path) + assert expected == actual + + +def test_database_path(): + project = "cuttlefish" + location = "mussel" + catalog = "winkle" + database = "nautilus" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format( + project=project, + location=location, + catalog=catalog, + database=database, + ) + actual = MetastoreServiceClient.database_path(project, location, catalog, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "scallop", + "location": "abalone", + "catalog": "squid", + "database": "clam", + } + path = MetastoreServiceClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_database_path(path) + assert expected == actual + + +def test_table_path(): + project = "whelk" + location = "octopus" + catalog = "oyster" + database = "nudibranch" + table = "cuttlefish" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format( + project=project, + location=location, + catalog=catalog, + database=database, + table=table, + ) + actual = MetastoreServiceClient.table_path( + project, location, catalog, database, table + ) + assert expected == actual + + +def test_parse_table_path(): + expected = { + "project": "mussel", + "location": "winkle", + "catalog": "nautilus", + "database": "scallop", + "table": "abalone", + } + path = MetastoreServiceClient.table_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_table_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MetastoreServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = MetastoreServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MetastoreServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = MetastoreServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MetastoreServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = MetastoreServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetastoreServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = MetastoreServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = MetastoreServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MetastoreServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = MetastoreServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MetastoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MetastoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MetastoreServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
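+        # (Clients support the context-manager protocol; leaving the
+        # `with client:` block is expected to close the underlying transport.)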
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport), + (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1alpha1/__init__.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1alpha1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1alpha1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1alpha1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1alpha1/test_metastore_service.py new file mode 100644 index 000000000000..f4a373239724 --- /dev/null +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/biglake_v1alpha1/test_metastore_service.py @@ -0,0 +1,12792 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.bigquery.biglake_v1alpha1.services.metastore_service import ( + MetastoreServiceAsyncClient, + MetastoreServiceClient, + pagers, + transports, +) +from google.cloud.bigquery.biglake_v1alpha1.types import metastore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetastoreServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetastoreServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MetastoreServiceClient, "grpc"), + (MetastoreServiceAsyncClient, "grpc_asyncio"), + (MetastoreServiceClient, "rest"), + ], +) +def test_metastore_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "biglake.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com" + ) + + +@pytest.mark.parametrize( + 
"transport_class,transport_name", + [ + (transports.MetastoreServiceGrpcTransport, "grpc"), + (transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MetastoreServiceRestTransport, "rest"), + ], +) +def test_metastore_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MetastoreServiceClient, "grpc"), + (MetastoreServiceAsyncClient, "grpc_asyncio"), + (MetastoreServiceClient, "rest"), + ], +) +def test_metastore_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "biglake.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com" + ) + + +def test_metastore_service_client_get_transport_class(): + transport = MetastoreServiceClient.get_transport_class() + available_transports = [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceRestTransport, + ] + assert transport in available_transports + + transport = MetastoreServiceClient.get_transport_class("grpc") + assert transport == transports.MetastoreServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + MetastoreServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceClient), +) +@mock.patch.object( + MetastoreServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceAsyncClient), +) +def test_metastore_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MetastoreServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(MetastoreServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
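+    # (Only "true" and "false" are accepted for GOOGLE_API_USE_CLIENT_CERTIFICATE;
+    # any other value is expected to surface as a ValueError, as asserted below.)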
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            MetastoreServiceClient,
+            transports.MetastoreServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            MetastoreServiceAsyncClient,
+            transports.MetastoreServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            MetastoreServiceClient,
+            transports.MetastoreServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            MetastoreServiceAsyncClient,
+            transports.MetastoreServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            MetastoreServiceClient,
+            transports.MetastoreServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            MetastoreServiceClient,
+            transports.MetastoreServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    MetastoreServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(MetastoreServiceClient),
+)
+@mock.patch.object(
+    MetastoreServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(MetastoreServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_metastore_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
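+    # (In short, the matrix exercised below should be:
+    #   cert available + env "true"  -> DEFAULT_MTLS_ENDPOINT, cert is used;
+    #   cert available + env "false" -> DEFAULT_ENDPOINT, no cert;
+    #   no cert at all               -> DEFAULT_ENDPOINT, no cert.)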
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [MetastoreServiceClient, MetastoreServiceAsyncClient] +) +@mock.patch.object( + MetastoreServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceClient), +) +@mock.patch.object( + MetastoreServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetastoreServiceAsyncClient), +) +def test_metastore_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
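+    # (get_mtls_endpoint_and_cert_source returns an (api_endpoint, cert_source)
+    # tuple; with the env var set to "true" and an explicit client_cert_source,
+    # both values supplied via client_options should be passed straight through.)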
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), + ], +) +def test_metastore_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
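+    # (Scopes set on client_options should be forwarded verbatim to the
+    # transport constructor, as the patched __init__ call below verifies.)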
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetastoreServiceClient, + transports.MetastoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MetastoreServiceClient, + transports.MetastoreServiceRestTransport, + "rest", + None, + ), + ], +) +def test_metastore_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_metastore_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MetastoreServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetastoreServiceClient, + transports.MetastoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetastoreServiceAsyncClient, + transports.MetastoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_metastore_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
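+    # (That is, create_channel should receive the credentials loaded from
+    # "credentials.json" (file_creds below), not the ADC credentials that
+    # google.auth.default would otherwise supply.)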
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "biglake.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="biglake.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateCatalogRequest, + dict, + ], +) +def test_create_catalog(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog( + name="name_value", + ) + response = client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_create_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + client.create_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateCatalogRequest() + + +@pytest.mark.asyncio +async def test_create_catalog_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateCatalogRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Catalog( + name="name_value", + ) + ) + response = await client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_create_catalog_async_from_dict(): + await test_create_catalog_async(request_type=dict) + + +def test_create_catalog_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateCatalogRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + call.return_value = metastore.Catalog() + client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_catalog_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateCatalogRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + await client.create_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_catalog_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_catalog( + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
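+        # (The flattened keyword arguments should have been packed into a single
+        # CreateCatalogRequest, with parent, catalog and catalog_id mapped onto
+        # the request fields of the same names.)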
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].catalog + mock_val = metastore.Catalog(name="name_value") + assert arg == mock_val + arg = args[0].catalog_id + mock_val = "catalog_id_value" + assert arg == mock_val + + +def test_create_catalog_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_catalog( + metastore.CreateCatalogRequest(), + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_catalog_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_catalog( + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].catalog + mock_val = metastore.Catalog(name="name_value") + assert arg == mock_val + arg = args[0].catalog_id + mock_val = "catalog_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_catalog_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_catalog( + metastore.CreateCatalogRequest(), + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteCatalogRequest, + dict, + ], +) +def test_delete_catalog(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog( + name="name_value", + ) + response = client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteCatalogRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_delete_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + client.delete_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteCatalogRequest() + + +@pytest.mark.asyncio +async def test_delete_catalog_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteCatalogRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Catalog( + name="name_value", + ) + ) + response = await client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_catalog_async_from_dict(): + await test_delete_catalog_async(request_type=dict) + + +def test_delete_catalog_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + call.return_value = metastore.Catalog() + client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_catalog_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + await client.delete_catalog(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_catalog_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_catalog_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_catalog( + metastore.DeleteCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_catalog_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_catalog_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_catalog( + metastore.DeleteCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetCatalogRequest, + dict, + ], +) +def test_get_catalog(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = metastore.Catalog( + name="name_value", + ) + response = client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_get_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + client.get_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetCatalogRequest() + + +@pytest.mark.asyncio +async def test_get_catalog_async( + transport: str = "grpc_asyncio", request_type=metastore.GetCatalogRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Catalog( + name="name_value", + ) + ) + response = await client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_catalog_async_from_dict(): + await test_get_catalog_async(request_type=dict) + + +def test_get_catalog_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + call.return_value = metastore.Catalog() + client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_catalog_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
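+    # (These end up on the call as "x-goog-request-params" gRPC metadata,
+    # e.g. ("x-goog-request-params", "name=name_value"), which is what the
+    # assertion at the end of this test checks.)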
+ request = metastore.GetCatalogRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + await client.get_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_catalog_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_catalog_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_catalog( + metastore.GetCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_catalog_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_catalog), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Catalog() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_catalog( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_catalog_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_catalog( + metastore.GetCatalogRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListCatalogsRequest, + dict, + ], +) +def test_list_catalogs(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListCatalogsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListCatalogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCatalogsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_catalogs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + client.list_catalogs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListCatalogsRequest() + + +@pytest.mark.asyncio +async def test_list_catalogs_async( + transport: str = "grpc_asyncio", request_type=metastore.ListCatalogsRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListCatalogsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListCatalogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCatalogsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_catalogs_async_from_dict(): + await test_list_catalogs_async(request_type=dict) + + +def test_list_catalogs_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListCatalogsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + call.return_value = metastore.ListCatalogsResponse() + client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_catalogs_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListCatalogsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListCatalogsResponse() + ) + await client.list_catalogs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_catalogs_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListCatalogsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_catalogs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_catalogs_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_catalogs( + metastore.ListCatalogsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_catalogs_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListCatalogsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListCatalogsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_catalogs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_catalogs_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        await client.list_catalogs(
+            metastore.ListCatalogsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_catalogs_pager(transport_name: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[],
+                next_page_token="def",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_catalogs(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, metastore.Catalog) for i in results)
+
+
+def test_list_catalogs_pages(transport_name: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_catalogs), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[],
+                next_page_token="def",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_catalogs(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_catalogs_async_pager():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_catalogs), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
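+        # (The trailing RuntimeError acts as a guard: the pager should stop once
+        # it sees an empty next_page_token, so if it ever requested a page beyond
+        # the last ListCatalogsResponse the mock would raise and fail the test.)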
+        call.side_effect = (
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[],
+                next_page_token="def",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_catalogs(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, metastore.Catalog) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_catalogs_async_pages():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_catalogs), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[],
+                next_page_token="def",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListCatalogsResponse(
+                catalogs=[
+                    metastore.Catalog(),
+                    metastore.Catalog(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_catalogs(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.CreateDatabaseRequest,
+        dict,
+    ],
+)
+def test_create_database(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_database), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Database(
+            name="name_value",
+            type_=metastore.Database.Type.HIVE,
+            hive_options=metastore.HiveDatabaseOptions(
+                location_uri="location_uri_value"
+            ),
+        )
+        response = client.create_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CreateDatabaseRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metastore.Database)
+    assert response.name == "name_value"
+    assert response.type_ == metastore.Database.Type.HIVE
+
+
+def test_create_database_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
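+    # (So client.create_database() with no arguments should still send a
+    # default, empty CreateDatabaseRequest(), as asserted below.)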
+ client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + client.create_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateDatabaseRequest() + + +@pytest.mark.asyncio +async def test_create_database_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateDatabaseRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + + +def test_create_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = metastore.Database() + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_database_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ) + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +def test_create_database_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + metastore.CreateDatabaseRequest(), + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_database_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_database( + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ) + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_database_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_database( + metastore.CreateDatabaseRequest(), + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteDatabaseRequest, + dict, + ], +) +def test_delete_database(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + response = client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_delete_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + client.delete_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteDatabaseRequest() + + +@pytest.mark.asyncio +async def test_delete_database_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteDatabaseRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteDatabaseRequest() + + # Establish that the response is the type that we expect. 
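+    # Awaiting the mocked FakeUnaryUnaryCall unwraps it, so the caller sees the
+    # plain Database message.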
+ assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_delete_database_async_from_dict(): + await test_delete_database_async(request_type=dict) + + +def test_delete_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteDatabaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + call.return_value = metastore.Database() + client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_database_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteDatabaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + await client.delete_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_database_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_database( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_database_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_database( + metastore.DeleteDatabaseRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_database_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.delete_database), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_database(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_database_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_database(
+            metastore.DeleteDatabaseRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.UpdateDatabaseRequest,
+        dict,
+    ],
+)
+def test_update_database(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_database), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Database(
+            name="name_value",
+            type_=metastore.Database.Type.HIVE,
+            hive_options=metastore.HiveDatabaseOptions(
+                location_uri="location_uri_value"
+            ),
+        )
+        response = client.update_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.UpdateDatabaseRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metastore.Database)
+    assert response.name == "name_value"
+    assert response.type_ == metastore.Database.Type.HIVE
+
+
+def test_update_database_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_database), "__call__") as call:
+        client.update_database()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.UpdateDatabaseRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_database_async(
+    transport: str = "grpc_asyncio", request_type=metastore.UpdateDatabaseRequest
+):
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_update_database_async_from_dict(): + await test_update_database_async(request_type=dict) + + +def test_update_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateDatabaseRequest() + + request.database.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + call.return_value = metastore.Database() + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "database.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_database_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateDatabaseRequest() + + request.database.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) + await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "database.name=name_value", + ) in kw["metadata"] + + +def test_update_database_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
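+        # The flattened kwargs below are merged into a single UpdateDatabaseRequest.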
+        client.update_database(
+            database=metastore.Database(
+                hive_options=metastore.HiveDatabaseOptions(
+                    location_uri="location_uri_value"
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = metastore.Database(
+            hive_options=metastore.HiveDatabaseOptions(
+                location_uri="location_uri_value"
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_database_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_database(
+            metastore.UpdateDatabaseRequest(),
+            database=metastore.Database(
+                hive_options=metastore.HiveDatabaseOptions(
+                    location_uri="location_uri_value"
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_database_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_database), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_database(
+            database=metastore.Database(
+                hive_options=metastore.HiveDatabaseOptions(
+                    location_uri="location_uri_value"
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = metastore.Database(
+            hive_options=metastore.HiveDatabaseOptions(
+                location_uri="location_uri_value"
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_database_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_database(
+            metastore.UpdateDatabaseRequest(),
+            database=metastore.Database(
+                hive_options=metastore.HiveDatabaseOptions(
+                    location_uri="location_uri_value"
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.GetDatabaseRequest,
+        dict,
+    ],
+)
+def test_get_database(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
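+    # request_type is parametrized over both the proto class and a plain dict.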
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + response = client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_get_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + client.get_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetDatabaseRequest() + + +@pytest.mark.asyncio +async def test_get_database_async( + transport: str = "grpc_asyncio", request_type=metastore.GetDatabaseRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + ) + ) + response = await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +@pytest.mark.asyncio +async def test_get_database_async_from_dict(): + await test_get_database_async(request_type=dict) + + +def test_get_database_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetDatabaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_database), "__call__") as call: + call.return_value = metastore.Database() + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_database_field_headers_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metastore.GetDatabaseRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_database), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database())
+        await client.get_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_database_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_database), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Database()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_database(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_database_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_database(
+            metastore.GetDatabaseRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_database_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_database), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_database(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_database_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_database( + metastore.GetDatabaseRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListDatabasesRequest, + dict, + ], +) +def test_list_databases(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_databases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + client.list_databases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListDatabasesRequest() + + +@pytest.mark.asyncio +async def test_list_databases_async( + transport: str = "grpc_asyncio", request_type=metastore.ListDatabasesRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + + +def test_list_databases_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
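+    # For ListDatabases the routed field is parent.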
+    request = metastore.ListDatabasesRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_databases), "__call__") as call:
+        call.return_value = metastore.ListDatabasesResponse()
+        client.list_databases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_databases_field_headers_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metastore.ListDatabasesRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_databases), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metastore.ListDatabasesResponse()
+        )
+        await client.list_databases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_databases_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_databases), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.ListDatabasesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_databases(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_databases_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_databases(
+            metastore.ListDatabasesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_databases_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_databases), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metastore.ListDatabasesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_databases(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_databases_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_databases(
+            metastore.ListDatabasesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_databases_pager(transport_name: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_databases), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[],
+                next_page_token="def",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_databases(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, metastore.Database) for i in results)
+
+
+def test_list_databases_pages(transport_name: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_databases), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[],
+                next_page_token="def",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_databases(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_databases_async_pager():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
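+        # The trailing RuntimeError is a sentinel: the mock raises it if the
+        # pager ever asks for more pages than were provided.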
+        call.side_effect = (
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[],
+                next_page_token="def",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_databases(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, metastore.Database) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_databases_async_pages():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[],
+                next_page_token="def",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListDatabasesResponse(
+                databases=[
+                    metastore.Database(),
+                    metastore.Database(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_databases(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.CreateTableRequest,
+        dict,
+    ],
+)
+def test_create_table(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Table(
+            name="name_value",
+            type_=metastore.Table.Type.HIVE,
+            etag="etag_value",
+            hive_options=metastore.HiveTableOptions(
+                parameters={"key_value": "value_value"}
+            ),
+        )
+        response = client.create_table(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CreateTableRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metastore.Table)
+    assert response.name == "name_value"
+    assert response.type_ == metastore.Table.Type.HIVE
+    assert response.etag == "etag_value"
+
+
+def test_create_table_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
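+    # The client should synthesize an empty CreateTableRequest on its own.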
+ client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + client.create_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateTableRequest() + + +@pytest.mark.asyncio +async def test_create_table_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_create_table_async_from_dict(): + await test_create_table_async(request_type=dict) + + +def test_create_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + call.return_value = metastore.Table() + client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_table_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Table()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_table(
+            parent="parent_value",
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            table_id="table_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].table
+        mock_val = metastore.Table(
+            hive_options=metastore.HiveTableOptions(
+                parameters={"key_value": "value_value"}
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].table_id
+        mock_val = "table_id_value"
+        assert arg == mock_val
+
+
+def test_create_table_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_table(
+            metastore.CreateTableRequest(),
+            parent="parent_value",
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            table_id="table_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_table_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_table(
+            parent="parent_value",
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            table_id="table_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].table
+        mock_val = metastore.Table(
+            hive_options=metastore.HiveTableOptions(
+                parameters={"key_value": "value_value"}
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].table_id
+        mock_val = "table_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_table_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_table( + metastore.CreateTableRequest(), + parent="parent_value", + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + table_id="table_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteTableRequest, + dict, + ], +) +def test_delete_table(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_delete_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + client.delete_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteTableRequest() + + +@pytest.mark.asyncio +async def test_delete_table_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteTableRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_delete_table_async_from_dict(): + await test_delete_table_async(request_type=dict) + + +def test_delete_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + call.return_value = metastore.Table() + client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_table_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_table( + metastore.DeleteTableRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_table_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.delete_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_table(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_table_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_table(
+            metastore.DeleteTableRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.UpdateTableRequest,
+        dict,
+    ],
+)
+def test_update_table(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Table(
+            name="name_value",
+            type_=metastore.Table.Type.HIVE,
+            etag="etag_value",
+            hive_options=metastore.HiveTableOptions(
+                parameters={"key_value": "value_value"}
+            ),
+        )
+        response = client.update_table(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.UpdateTableRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metastore.Table)
+    assert response.name == "name_value"
+    assert response.type_ == metastore.Table.Type.HIVE
+    assert response.etag == "etag_value"
+
+
+def test_update_table_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_table), "__call__") as call:
+        client.update_table()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.UpdateTableRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_table_async(
+    transport: str = "grpc_asyncio", request_type=metastore.UpdateTableRequest
+):
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_update_table_async_from_dict(): + await test_update_table_async(request_type=dict) + + +def test_update_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateTableRequest() + + request.table.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + call.return_value = metastore.Table() + client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "table.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateTableRequest() + + request.table.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "table.name=name_value", + ) in kw["metadata"] + + +def test_update_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_table( + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
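+        # args[0] is the assembled UpdateTableRequest; each flattened kwarg
+        # should have landed on the matching request field.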
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].table
+        mock_val = metastore.Table(
+            hive_options=metastore.HiveTableOptions(
+                parameters={"key_value": "value_value"}
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_table_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_table(
+            metastore.UpdateTableRequest(),
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_table_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_table(
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].table
+        mock_val = metastore.Table(
+            hive_options=metastore.HiveTableOptions(
+                parameters={"key_value": "value_value"}
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_table_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_table(
+            metastore.UpdateTableRequest(),
+            table=metastore.Table(
+                hive_options=metastore.HiveTableOptions(
+                    parameters={"key_value": "value_value"}
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.RenameTableRequest,
+        dict,
+    ],
+)
+def test_rename_table(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.rename_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
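+        # On the sync transport the mocked ``__call__`` may return the
+        # response message directly; the async variants of these tests have
+        # to wrap it in ``grpc_helpers_async.FakeUnaryUnaryCall`` so the
+        # client can await it.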
+ call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.rename_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RenameTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_rename_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + client.rename_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RenameTableRequest() + + +@pytest.mark.asyncio +async def test_rename_table_async( + transport: str = "grpc_asyncio", request_type=metastore.RenameTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.rename_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RenameTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_rename_table_async_from_dict(): + await test_rename_table_async(request_type=dict) + + +def test_rename_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.RenameTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_table), "__call__") as call: + call.return_value = metastore.Table() + client.rename_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
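+    # gRPC metadata is a sequence of ``(key, value)`` tuples, so membership
+    # can be asserted with a tuple literal. The routing header is built from
+    # the request's resource fields; as a sketch:
+    #
+    #   gapic_v1.routing_header.to_grpc_metadata([("name", "name_value")])
+    #   # -> ("x-goog-request-params", "name=name_value")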
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_rename_table_field_headers_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metastore.RenameTableRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.rename_table), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
+        await client.rename_table(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_rename_table_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.rename_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Table()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.rename_table(
+            name="name_value",
+            new_name="new_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].new_name
+        mock_val = "new_name_value"
+        assert arg == mock_val
+
+
+def test_rename_table_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.rename_table(
+            metastore.RenameTableRequest(),
+            name="name_value",
+            new_name="new_name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_rename_table_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.rename_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.rename_table(
+            name="name_value",
+            new_name="new_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
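+        # Unlike their sync counterparts, the async tests only assert that at
+        # least one call was recorded (a truthy ``len``) rather than exactly
+        # one.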
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_name + mock_val = "new_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_rename_table_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rename_table( + metastore.RenameTableRequest(), + name="name_value", + new_name="new_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetTableRequest, + dict, + ], +) +def test_get_table(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + response = client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_get_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + client.get_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetTableRequest() + + +@pytest.mark.asyncio +async def test_get_table_async( + transport: str = "grpc_asyncio", request_type=metastore.GetTableRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + ) + ) + response = await client.get_table(request) + + # Establish that the underlying gRPC stub method was called. 
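+        # Awaiting the ``FakeUnaryUnaryCall`` above already unwrapped the
+        # fake, so the response assertions after this block see a plain
+        # ``metastore.Table``.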
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetTableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_table_async_from_dict(): + await test_get_table_async(request_type=dict) + + +def test_get_table_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value = metastore.Table() + client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_table_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) + await client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_table_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Table() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_table_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
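+    # The two calling styles are mutually exclusive: flattened values would
+    # silently compete with the request object, so the client raises
+    # ``ValueError`` before any RPC is attempted, e.g.:
+    #
+    #   client.get_table(metastore.GetTableRequest(), name="name_value")  # ValueError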
+    with pytest.raises(ValueError):
+        client.get_table(
+            metastore.GetTableRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_table_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_table), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_table(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_table_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_table(
+            metastore.GetTableRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.ListTablesRequest,
+        dict,
+    ],
+)
+def test_list_tables(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tables), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.ListTablesResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_tables(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.ListTablesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListTablesPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_tables_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tables), "__call__") as call:
+        client.list_tables()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.ListTablesRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_tables_async(
+    transport: str = "grpc_asyncio", request_type=metastore.ListTablesRequest
+):
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
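+    # ``request_type`` can also be a plain ``dict`` (see the parametrized
+    # sync test and the ``*_from_dict`` variants): the generated client
+    # coerces a dict into the request message, so ``{"parent": "p"}`` acts
+    # like ``metastore.ListTablesRequest(parent="p")`` -- an illustration,
+    # not something this particular test asserts.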
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListTablesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListTablesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTablesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tables_async_from_dict(): + await test_list_tables_async(request_type=dict) + + +def test_list_tables_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListTablesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + call.return_value = metastore.ListTablesResponse() + client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tables_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListTablesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListTablesResponse() + ) + await client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_tables_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListTablesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tables( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
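+        # ``client.list_tables`` wraps the response in a pager, but the mock
+        # still records the raw request proto, so ``args[0].parent`` is
+        # checked the same way as for the unary methods.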
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_tables_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_tables(
+            metastore.ListTablesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_tables_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tables), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metastore.ListTablesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_tables(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_tables_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_tables(
+            metastore.ListTablesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_tables_pager(transport_name: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tables), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListTablesResponse(
+                tables=[],
+                next_page_token="def",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_tables(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, metastore.Table) for i in results)
+
+
+def test_list_tables_pages(transport_name: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tables), "__call__") as call:
+        # Set the response to a series of pages.
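+        # The trailing ``RuntimeError`` is a sentinel: if the pager were to
+        # request a page beyond the last response (i.e. ignore the empty
+        # ``next_page_token``), the mock would raise and fail the test.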
+        call.side_effect = (
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListTablesResponse(
+                tables=[],
+                next_page_token="def",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_tables(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_tables_async_pager():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_tables), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListTablesResponse(
+                tables=[],
+                next_page_token="def",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_tables(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, metastore.Table) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_tables_async_pages():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_tables), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListTablesResponse(
+                tables=[],
+                next_page_token="def",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                ],
+                next_page_token="ghi",
+            ),
+            metastore.ListTablesResponse(
+                tables=[
+                    metastore.Table(),
+                    metastore.Table(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_tables(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.CreateLockRequest,
+        dict,
+    ],
+)
+def test_create_lock(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_lock), "__call__") as call:
+        # Designate an appropriate return value for the call.
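+        # The trailing underscore on ``type_`` is the proto-plus rename of a
+        # field called ``type``, which would otherwise shadow the Python
+        # builtin; it round-trips as a ``metastore.Lock.Type`` enum member.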
+ call.return_value = metastore.Lock( + name="name_value", + type_=metastore.Lock.Type.EXCLUSIVE, + state=metastore.Lock.State.WAITING, + table_id="table_id_value", + ) + response = client.create_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateLockRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Lock) + assert response.name == "name_value" + assert response.type_ == metastore.Lock.Type.EXCLUSIVE + assert response.state == metastore.Lock.State.WAITING + + +def test_create_lock_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_lock), "__call__") as call: + client.create_lock() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateLockRequest() + + +@pytest.mark.asyncio +async def test_create_lock_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateLockRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_lock), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Lock( + name="name_value", + type_=metastore.Lock.Type.EXCLUSIVE, + state=metastore.Lock.State.WAITING, + ) + ) + response = await client.create_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateLockRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Lock) + assert response.name == "name_value" + assert response.type_ == metastore.Lock.Type.EXCLUSIVE + assert response.state == metastore.Lock.State.WAITING + + +@pytest.mark.asyncio +async def test_create_lock_async_from_dict(): + await test_create_lock_async(request_type=dict) + + +def test_create_lock_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateLockRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_lock), "__call__") as call: + call.return_value = metastore.Lock() + client.create_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_lock_field_headers_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metastore.CreateLockRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_lock), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock())
+        await client.create_lock(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_lock_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_lock), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Lock()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_lock(
+            parent="parent_value",
+            lock=metastore.Lock(table_id="table_id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].lock
+        mock_val = metastore.Lock(table_id="table_id_value")
+        assert arg == mock_val
+
+
+def test_create_lock_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_lock(
+            metastore.CreateLockRequest(),
+            parent="parent_value",
+            lock=metastore.Lock(table_id="table_id_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_lock_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_lock), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_lock(
+            parent="parent_value",
+            lock=metastore.Lock(table_id="table_id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].lock + mock_val = metastore.Lock(table_id="table_id_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_lock_flattened_error_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_lock( + metastore.CreateLockRequest(), + parent="parent_value", + lock=metastore.Lock(table_id="table_id_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteLockRequest, + dict, + ], +) +def test_delete_lock(request_type, transport: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_lock), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteLockRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_lock_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_lock), "__call__") as call: + client.delete_lock() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteLockRequest() + + +@pytest.mark.asyncio +async def test_delete_lock_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteLockRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_lock), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteLockRequest() + + # Establish that the response is the type that we expect. 
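+    # DeleteLock carries no payload (``google.protobuf.Empty`` on the wire),
+    # and the generated method returns nothing, so ``None`` is expected here.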
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_lock_async_from_dict(): + await test_delete_lock_async(request_type=dict) + + +def test_delete_lock_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteLockRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_lock), "__call__") as call: + call.return_value = None + client.delete_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_lock_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteLockRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_lock), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_lock_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_lock), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_lock( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_lock_flattened_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_lock( + metastore.DeleteLockRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_lock_flattened_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_lock), "__call__") as call: + # Designate an appropriate return value for the call. 
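+        # Even a ``None`` result must be awaitable on the async transport,
+        # hence the ``FakeUnaryUnaryCall(None)`` wrapper below.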
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_lock(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_lock_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_lock(
+            metastore.DeleteLockRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.CheckLockRequest,
+        dict,
+    ],
+)
+def test_check_lock(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.check_lock), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.Lock(
+            name="name_value",
+            type_=metastore.Lock.Type.EXCLUSIVE,
+            state=metastore.Lock.State.WAITING,
+            table_id="table_id_value",
+        )
+        response = client.check_lock(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CheckLockRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metastore.Lock)
+    assert response.name == "name_value"
+    assert response.type_ == metastore.Lock.Type.EXCLUSIVE
+    assert response.state == metastore.Lock.State.WAITING
+
+
+def test_check_lock_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.check_lock), "__call__") as call:
+        client.check_lock()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CheckLockRequest()
+
+
+@pytest.mark.asyncio
+async def test_check_lock_async(
+    transport: str = "grpc_asyncio", request_type=metastore.CheckLockRequest
+):
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.check_lock), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Lock( + name="name_value", + type_=metastore.Lock.Type.EXCLUSIVE, + state=metastore.Lock.State.WAITING, + ) + ) + response = await client.check_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CheckLockRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Lock) + assert response.name == "name_value" + assert response.type_ == metastore.Lock.Type.EXCLUSIVE + assert response.state == metastore.Lock.State.WAITING + + +@pytest.mark.asyncio +async def test_check_lock_async_from_dict(): + await test_check_lock_async(request_type=dict) + + +def test_check_lock_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CheckLockRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_lock), "__call__") as call: + call.return_value = metastore.Lock() + client.check_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_lock_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CheckLockRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_lock), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock()) + await client.check_lock(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_check_lock_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.check_lock), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Lock() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.check_lock( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_check_lock_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.check_lock(
+            metastore.CheckLockRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_check_lock_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.check_lock), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.check_lock(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_check_lock_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.check_lock(
+            metastore.CheckLockRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        metastore.ListLocksRequest,
+        dict,
+    ],
+)
+def test_list_locks(request_type, transport: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.ListLocksResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_locks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.ListLocksRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLocksPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_locks_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
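+    # Calling the method with no arguments at all exercises the
+    # default-request path: the client builds ``metastore.ListLocksRequest()``
+    # itself, which is what the assertion on ``args[0]`` verifies.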
+ with mock.patch.object(type(client.transport.list_locks), "__call__") as call: + client.list_locks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListLocksRequest() + + +@pytest.mark.asyncio +async def test_list_locks_async( + transport: str = "grpc_asyncio", request_type=metastore.ListLocksRequest +): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListLocksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_locks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListLocksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLocksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_locks_async_from_dict(): + await test_list_locks_async(request_type=dict) + + +def test_list_locks_field_headers(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListLocksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locks), "__call__") as call: + call.return_value = metastore.ListLocksResponse() + client.list_locks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locks_field_headers_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListLocksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListLocksResponse() + ) + await client.list_locks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_locks_flattened():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.ListLocksResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_locks(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_locks_flattened_error():
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_locks(
+            metastore.ListLocksRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_locks_flattened_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metastore.ListLocksResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_locks(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_locks_flattened_error_async():
+    client = MetastoreServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_locks(
+            metastore.ListLocksRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_locks_pager(transport_name: str = "grpc"):
+    client = MetastoreServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locks), "__call__") as call:
+        # Set the response to a series of pages.
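The pager tests in this stretch feed the mocked RPC a fixed series of `ListLocksResponse` pages (3 + 0 + 1 + 2 locks) and expect the pager to flatten them into six results. The token-threading loop a pager performs can be sketched without the generated classes; all names here are illustrative:

```python
class FakePage:
    def __init__(self, items, next_page_token=""):
        self.items = items
        self.next_page_token = next_page_token


def iterate_pager(fetch):
    """Yield items across pages until an empty next_page_token.

    `fetch(token)` stands in for one ListLocks RPC; the generated
    pager does the same token threading under the hood.
    """
    token = ""
    while True:
        page = fetch(token)
        yield from page.items
        token = page.next_page_token
        if not token:
            return


pages = {
    "": FakePage(["lock1", "lock2", "lock3"], "abc"),
    "abc": FakePage([], "def"),
    "def": FakePage(["lock4"], "ghi"),
    "ghi": FakePage(["lock5", "lock6"]),
}
assert len(list(iterate_pager(lambda t: pages[t]))) == 6
```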
+ call.side_effect = ( + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + metastore.Lock(), + ], + next_page_token="abc", + ), + metastore.ListLocksResponse( + locks=[], + next_page_token="def", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + ], + next_page_token="ghi", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_locks(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Lock) for i in results) + + +def test_list_locks_pages(transport_name: str = "grpc"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + metastore.Lock(), + ], + next_page_token="abc", + ), + metastore.ListLocksResponse( + locks=[], + next_page_token="def", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + ], + next_page_token="ghi", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + ], + ), + RuntimeError, + ) + pages = list(client.list_locks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_locks_async_pager(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_locks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + metastore.Lock(), + ], + next_page_token="abc", + ), + metastore.ListLocksResponse( + locks=[], + next_page_token="def", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + ], + next_page_token="ghi", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_locks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metastore.Lock) for i in responses) + + +@pytest.mark.asyncio +async def test_list_locks_async_pages(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_locks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
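Each page series in these tests ends with a bare `RuntimeError`. When `side_effect` is an iterable, `unittest.mock` returns its values call by call and raises any exception it reaches, so the trailing sentinel makes the test fail loudly if the pager ever requests a page beyond the last one:

```python
from unittest import mock

rpc = mock.Mock(side_effect=["page1", "page2", RuntimeError])
assert rpc() == "page1"
assert rpc() == "page2"
try:
    rpc()  # a third call hits the RuntimeError sentinel
except RuntimeError:
    pass
```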
+ call.side_effect = ( + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + metastore.Lock(), + ], + next_page_token="abc", + ), + metastore.ListLocksResponse( + locks=[], + next_page_token="def", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + ], + next_page_token="ghi", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_locks(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateCatalogRequest, + dict, + ], +) +def test_create_catalog_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["catalog"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_catalog(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_create_catalog_rest_required_fields( + request_type=metastore.CreateCatalogRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["catalog_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "catalogId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "catalogId" in jsonified_request + assert jsonified_request["catalogId"] == request_init["catalog_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["catalogId"] = "catalog_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_catalog._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
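The required-fields tests lean on a proto3 serialization rule: `json_format.MessageToJson` with `including_default_value_fields=False` drops any field still holding its default value and renders the rest under camelCase JSON names. That is why `catalogId` vanishes from `jsonified_request` until `_get_unset_required_fields` re-adds it. A standalone round trip; the import path is an assumption following the usual GAPIC layout for this package:

```python
import json

from google.protobuf import json_format

# Assumed import path; the test file imports the same `metastore` module.
from google.cloud.bigquery_biglake_v1alpha1.types import metastore

request = metastore.CreateCatalogRequest(parent="projects/p/locations/l")
jsonified = json.loads(
    json_format.MessageToJson(
        metastore.CreateCatalogRequest.pb(request),
        including_default_value_fields=False,
    )
)
# catalog_id is at its default (""), so its camelCase form "catalogId"
# is absent; only the populated field survives.
assert jsonified == {"parent": "projects/p/locations/l"}
```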
+ assert not set(unset_fields) - set(("catalog_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "catalogId" in jsonified_request + assert jsonified_request["catalogId"] == "catalog_id_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_catalog(request) + + expected_params = [ + ( + "catalogId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_catalog_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_catalog._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("catalogId",)) + & set( + ( + "parent", + "catalog", + "catalogId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_catalog_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_catalog" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_create_catalog" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.CreateCatalogRequest.pb(metastore.CreateCatalogRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Catalog.to_json(metastore.Catalog()) + + request = metastore.CreateCatalogRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata 
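These interceptor tests patch the `pre_create_catalog`/`post_create_catalog` hooks to prove each fires exactly once per call. In application code the same hooks are overridden on a subclass; a sketch of a logging interceptor, assuming the usual GAPIC module layout for the `transports` import:

```python
import logging

# Assumed import path for the generated transports module.
from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import (
    transports,
)


class LoggingInterceptor(transports.MetastoreServiceRestInterceptor):
    """Logs every CreateCatalog round trip."""

    def pre_create_catalog(self, request, metadata):
        # Called before the HTTP request; must return (request, metadata).
        logging.info("CreateCatalog request: %s", request)
        return request, metadata

    def post_create_catalog(self, response):
        # Called with the decoded response on the way back out.
        logging.info("CreateCatalog response: %s", response)
        return response
```

Wiring it up matches the non-null branch of the parametrized test: pass `interceptor=LoggingInterceptor()` when constructing the `MetastoreServiceRestTransport`.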
+ post.return_value = metastore.Catalog() + + client.create_catalog( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_catalog_rest_bad_request( + transport: str = "rest", request_type=metastore.CreateCatalogRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["catalog"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_catalog(request) + + +def test_create_catalog_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_catalog(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/catalogs" + % client.transport._host, + args[1], + ) + + +def test_create_catalog_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
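`path_template.validate`, used in the flattened test above, checks a concrete URI against the `{var=pattern}` template from the method's http rule, where each `*` matches a single path segment. On its own:

```python
from google.api_core import path_template

template = "v1alpha1/{parent=projects/*/locations/*}/catalogs"
assert path_template.validate(template, "v1alpha1/projects/p1/locations/l1/catalogs")
assert not path_template.validate(template, "v1alpha1/projects/p1/catalogs")
```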
+ with pytest.raises(ValueError): + client.create_catalog( + metastore.CreateCatalogRequest(), + parent="parent_value", + catalog=metastore.Catalog(name="name_value"), + catalog_id="catalog_id_value", + ) + + +def test_create_catalog_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteCatalogRequest, + dict, + ], +) +def test_delete_catalog_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_catalog(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_delete_catalog_rest_required_fields( + request_type=metastore.DeleteCatalogRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
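The `transcode_result` dict stubbed into these required-fields tests mirrors the shape the real `path_template.transcode` returns: path parameters fold into the URI, the http rule's `body` field becomes the payload, and every remaining field lands in `query_params`. A toy version for a rule like `post: "/v1alpha1/{parent=...}/catalogs", body: "catalog"` (an illustration only, not the real implementation):

```python
def toy_transcode(parent: str, catalog: dict, catalog_id: str) -> dict:
    """Toy sketch of the uri/method/body/query_params split."""
    return {
        "uri": f"/v1alpha1/{parent}/catalogs",  # path params move into the URI
        "method": "post",
        "body": catalog,  # the rule's `body:` field becomes the payload
        "query_params": {"catalogId": catalog_id},  # everything else
    }


result = toy_transcode("projects/p/locations/l", {"name": ""}, "cat1")
assert result["uri"] == "/v1alpha1/projects/p/locations/l/catalogs"
assert "catalogId" in result["query_params"]
```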
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_catalog(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_catalog_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_catalog._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_catalog_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_catalog" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_delete_catalog" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.DeleteCatalogRequest.pb(metastore.DeleteCatalogRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Catalog.to_json(metastore.Catalog()) + + request = metastore.DeleteCatalogRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Catalog() + + client.delete_catalog( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_catalog_rest_bad_request( + transport: str = "rest", request_type=metastore.DeleteCatalogRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
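The bad-request tests in this section rely on `google.api_core` mapping HTTP status codes onto typed exception classes, so a 400 surfaces as `core_exceptions.BadRequest`. The mapping can be checked directly with `from_http_response`:

```python
from google.api_core import exceptions as core_exceptions
from requests import Request, Response

response = Response()
response.status_code = 400
response.request = Request()  # from_http_response reads .request.method/.url
response._content = b'{"error": {"message": "bad catalog id"}}'

exc = core_exceptions.from_http_response(response)
assert isinstance(exc, core_exceptions.BadRequest)
```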
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_catalog(request) + + +def test_delete_catalog_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_catalog(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_catalog_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_catalog( + metastore.DeleteCatalogRequest(), + name="name_value", + ) + + +def test_delete_catalog_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetCatalogRequest, + dict, + ], +) +def test_get_catalog_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_catalog(request) + + # Establish that the response is the type that we expect. 
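Every happy-path REST test in this section fakes its HTTP response the same way: build the proto-plus return value, unwrap it to the raw protobuf message with `.pb()`, serialize it with `json_format.MessageToJson`, and plant the bytes on a `requests.Response`. The recipe in isolation, under the same import-path assumption as above:

```python
from google.protobuf import json_format
from requests import Response

# Assumed import path; the test file imports the same `metastore` module.
from google.cloud.bigquery_biglake_v1alpha1.types import metastore

return_value = metastore.Catalog(name="name_value")

response_value = Response()
response_value.status_code = 200
# proto-plus wrapper -> raw pb message -> canonical JSON -> body bytes
pb_return_value = metastore.Catalog.pb(return_value)
response_value._content = json_format.MessageToJson(pb_return_value).encode("UTF-8")

assert b'"name_value"' in response_value._content
```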
+ assert isinstance(response, metastore.Catalog) + assert response.name == "name_value" + + +def test_get_catalog_rest_required_fields(request_type=metastore.GetCatalogRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_catalog._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_catalog(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_catalog_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_catalog._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_catalog_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_catalog" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_get_catalog" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.GetCatalogRequest.pb(metastore.GetCatalogRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Catalog.to_json(metastore.Catalog()) + + request = metastore.GetCatalogRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Catalog() + + client.get_catalog( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_catalog_rest_bad_request( + transport: str = "rest", request_type=metastore.GetCatalogRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_catalog(request) + + +def test_get_catalog_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Catalog() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Catalog.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_catalog(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_catalog_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_catalog( + metastore.GetCatalogRequest(), + name="name_value", + ) + + +def test_get_catalog_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListCatalogsRequest, + dict, + ], +) +def test_list_catalogs_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListCatalogsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListCatalogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_catalogs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListCatalogsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_catalogs_rest_required_fields(request_type=metastore.ListCatalogsRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_catalogs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_catalogs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.ListCatalogsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.ListCatalogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_catalogs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_catalogs_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_catalogs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_catalogs_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_catalogs" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_list_catalogs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.ListCatalogsRequest.pb(metastore.ListCatalogsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.ListCatalogsResponse.to_json( + metastore.ListCatalogsResponse() + ) + + request = metastore.ListCatalogsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.ListCatalogsResponse() + + client.list_catalogs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_catalogs_rest_bad_request( + transport: str = "rest", request_type=metastore.ListCatalogsRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
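Read the `*_unset_required_fields` assertions as set algebra: the left operand lists the query parameters the transport can default, the right lists the method's required fields, and their intersection is exactly what `_get_unset_required_fields({})` must re-insert. Spelled out for the two cases above:

```python
# create_catalog: catalogId is both defaultable and required, so it is re-added.
assert {"catalogId"} & {"parent", "catalog", "catalogId"} == {"catalogId"}

# list_catalogs: pageSize/pageToken are defaultable but not required,
# so nothing needs re-inserting.
assert {"pageSize", "pageToken"} & {"parent"} == set()
```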
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_catalogs(request) + + +def test_list_catalogs_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListCatalogsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListCatalogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_catalogs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/catalogs" + % client.transport._host, + args[1], + ) + + +def test_list_catalogs_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_catalogs( + metastore.ListCatalogsRequest(), + parent="parent_value", + ) + + +def test_list_catalogs_rest_pager(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
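Unlike the gRPC pager tests, the REST pager cannot receive response protos directly through `side_effect`; each page must first be serialized to JSON and wrapped in an HTTP 200, one fake response per expected request, which is what the block below does before iterating. Reduced to its essentials (names illustrative):

```python
import json
from unittest import mock

from requests import Response


def make_http_page(items, token=""):
    """Wrap one logical page of results in a fake HTTP 200 response."""
    page = Response()
    page.status_code = 200
    page._content = json.dumps(
        {"catalogs": items, "nextPageToken": token}
    ).encode("UTF-8")
    return page


session = mock.Mock()
# One fake HTTP response per page; the empty token on the last page
# tells the pager to stop issuing requests.
session.request.side_effect = [
    make_http_page([{"name": "c1"}], "abc"),
    make_http_page([{"name": "c2"}]),
]
```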
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metastore.ListCatalogsResponse( + catalogs=[ + metastore.Catalog(), + metastore.Catalog(), + metastore.Catalog(), + ], + next_page_token="abc", + ), + metastore.ListCatalogsResponse( + catalogs=[], + next_page_token="def", + ), + metastore.ListCatalogsResponse( + catalogs=[ + metastore.Catalog(), + ], + next_page_token="ghi", + ), + metastore.ListCatalogsResponse( + catalogs=[ + metastore.Catalog(), + metastore.Catalog(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metastore.ListCatalogsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_catalogs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Catalog) for i in results) + + pages = list(client.list_catalogs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateDatabaseRequest, + dict, + ], +) +def test_create_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_database(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_create_database_rest_required_fields( + request_type=metastore.CreateDatabaseRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "databaseId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == request_init["database_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["databaseId"] = "database_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("database_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == "database_id_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_database(request) + + expected_params = [ + ( + "databaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("databaseId",)) + & set( + ( + "parent", + "database", + "databaseId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_create_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.CreateDatabaseRequest.pb( + metastore.CreateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.CreateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.create_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_database_rest_bad_request( + transport: str = "rest", request_type=metastore.CreateDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_database(request) + + +def test_create_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases" + % client.transport._host, + args[1], + ) + + +def test_create_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + metastore.CreateDatabaseRequest(), + parent="parent_value", + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + database_id="database_id_value", + ) + + +def test_create_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteDatabaseRequest, + dict, + ], +) +def test_delete_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_delete_database_rest_required_fields( + request_type=metastore.DeleteDatabaseRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_delete_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.DeleteDatabaseRequest.pb( + metastore.DeleteDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.DeleteDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.delete_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_database_rest_bad_request( + transport: str = "rest", request_type=metastore.DeleteDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_database(request) + + +def test_delete_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_database( + metastore.DeleteDatabaseRequest(), + name="name_value", + ) + + +def test_delete_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.UpdateDatabaseRequest, + dict, + ], +) +def test_update_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "database": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + } + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_update_database_rest_required_fields( + request_type=metastore.UpdateDatabaseRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("database",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_update_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_update_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.UpdateDatabaseRequest.pb( + metastore.UpdateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.UpdateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.update_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_database_rest_bad_request( + transport: str = "rest", request_type=metastore.UpdateDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "database": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + } + request_init["database"] = { + "hive_options": {"location_uri": "location_uri_value", "parameters": {}}, + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_database(request) + + +def test_update_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "database": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{database.name=projects/*/locations/*/catalogs/*/databases/*}" + % client.transport._host, + args[1], + ) + + +def test_update_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database( + metastore.UpdateDatabaseRequest(), + database=metastore.Database( + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetDatabaseRequest, + dict, + ], +) +def test_get_database_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Database( + name="name_value", + type_=metastore.Database.Type.HIVE, + hive_options=metastore.HiveDatabaseOptions( + location_uri="location_uri_value" + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Database) + assert response.name == "name_value" + assert response.type_ == metastore.Database.Type.HIVE + + +def test_get_database_rest_required_fields(request_type=metastore.GetDatabaseRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_database_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_database_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_database" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_get_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.GetDatabaseRequest.pb(metastore.GetDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Database.to_json(metastore.Database()) + + request = metastore.GetDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Database() + + client.get_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_database_rest_bad_request( + transport: str = "rest", request_type=metastore.GetDatabaseRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_database(request) + + +def test_get_database_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Database.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}" + % client.transport._host, + args[1], + ) + + +def test_get_database_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_database( + metastore.GetDatabaseRequest(), + name="name_value", + ) + + +def test_get_database_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListDatabasesRequest, + dict, + ], +) +def test_list_databases_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_databases(request) + + # Establish that the response is the type that we expect. 
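+ # The REST client wraps ListDatabasesResponse in a pager, so the type check
+ # is against pagers.ListDatabasesPager rather than the raw proto response.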
+ assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_databases_rest_required_fields( + request_type=metastore.ListDatabasesRequest, +): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
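+ # pageSize and pageToken are optional query parameters; only "parent" is
+ # required, and it travels in the URI path rather than the query string.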
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_databases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_databases_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_databases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_databases_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_databases" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_list_databases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.ListDatabasesRequest.pb(metastore.ListDatabasesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.ListDatabasesResponse.to_json( + metastore.ListDatabasesResponse() + ) + + request = metastore.ListDatabasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.ListDatabasesResponse() + + client.list_databases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_databases_rest_bad_request( + transport: str = "rest", request_type=metastore.ListDatabasesRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_databases(request) + + +def test_list_databases_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases" + % client.transport._host, + args[1], + ) + + +def test_list_databases_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + metastore.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_rest_pager(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
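+ # Page layout used below: 3 + 0 + 1 + 2 Databases across four pages, which
+ # checks that an empty page is tolerated and iteration flattens to six items.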
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + metastore.Database(), + ], + next_page_token="abc", + ), + metastore.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + ], + next_page_token="ghi", + ), + metastore.ListDatabasesResponse( + databases=[ + metastore.Database(), + metastore.Database(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metastore.ListDatabasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3" + } + + pager = client.list_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Database) for i in results) + + pages = list(client.list_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateTableRequest, + dict, + ], +) +def test_create_table_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request_init["table"] = { + "hive_options": { + "parameters": {}, + "table_type": "table_type_value", + "storage_descriptor": { + "location_uri": "location_uri_value", + "input_format": "input_format_value", + "output_format": "output_format_value", + "serde_info": {"serialization_lib": "serialization_lib_value"}, + }, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_table(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_create_table_rest_required_fields(request_type=metastore.CreateTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["table_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "tableId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "tableId" in jsonified_request + assert jsonified_request["tableId"] == request_init["table_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["tableId"] = "table_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("table_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "tableId" in jsonified_request + assert jsonified_request["tableId"] == "table_id_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
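+ # create_table also has a required query parameter: an unset tableId is
+ # expected to surface as ("tableId", "") next to the standard $alt param.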
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_table(request) + + expected_params = [ + ( + "tableId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_table_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_table._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("tableId",)) + & set( + ( + "parent", + "table", + "tableId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_table_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_table" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_create_table" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.CreateTableRequest.pb(metastore.CreateTableRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Table.to_json(metastore.Table()) + + request = metastore.CreateTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Table() + + client.create_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_table_rest_bad_request( + transport: str = "rest", request_type=metastore.CreateTableRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request_init["table"] = { + "hive_options": { + "parameters": {}, + "table_type": "table_type_value", + "storage_descriptor": { + "location_uri": "location_uri_value", + "input_format": "input_format_value", + "output_format": "output_format_value", + "serde_info": {"serialization_lib": "serialization_lib_value"}, + }, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + "etag": "etag_value", + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_table(request) + + +def test_create_table_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + table_id="table_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" + % client.transport._host, + args[1], + ) + + +def test_create_table_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_table( + metastore.CreateTableRequest(), + parent="parent_value", + table=metastore.Table( + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ) + ), + table_id="table_id_value", + ) + + +def test_create_table_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteTableRequest, + dict, + ], +) +def test_delete_table_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_delete_table_rest_required_fields(request_type=metastore.DeleteTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_table_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_table_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_table" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_delete_table" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.DeleteTableRequest.pb(metastore.DeleteTableRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Table.to_json(metastore.Table()) + + request = metastore.DeleteTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Table() + + client.delete_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_table_rest_bad_request( + transport: str = "rest", request_type=metastore.DeleteTableRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_table(request) + + +def test_delete_table_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_table_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_table( + metastore.DeleteTableRequest(), + name="name_value", + ) + + +def test_delete_table_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.UpdateTableRequest, + dict, + ], +) +def test_update_table_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "table": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + } + request_init["table"] = { + "hive_options": { + "parameters": {}, + "table_type": "table_type_value", + "storage_descriptor": { + "location_uri": "location_uri_value", + "input_format": "input_format_value", + "output_format": "output_format_value", + "serde_info": {"serialization_lib": "serialization_lib_value"}, + }, + }, + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_update_table_rest_required_fields(request_type=metastore.UpdateTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_table_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("table",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_table_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_update_table" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_update_table" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.UpdateTableRequest.pb(metastore.UpdateTableRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.Table.to_json(metastore.Table()) + + request = metastore.UpdateTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.Table() + + client.update_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_table_rest_bad_request( + transport: str = "rest", request_type=metastore.UpdateTableRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "table": { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + } + request_init["table"] = { + "hive_options": { + "parameters": {}, + "table_type": "table_type_value", + "storage_descriptor": { + "location_uri": "location_uri_value", + "input_format": "input_format_value", + "output_format": "output_format_value", + "serde_info": {"serialization_lib": "serialization_lib_value"}, + }, + }, + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "type_": 1, + "etag": "etag_value", + } + 
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.update_table(request)
+
+
+def test_update_table_rest_flattened():
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = metastore.Table()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {
+ "table": {
+ "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+ }
+ }
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ table=metastore.Table(
+ hive_options=metastore.HiveTableOptions(
+ parameters={"key_value": "value_value"}
+ )
+ ),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = metastore.Table.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ client.update_table(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1alpha1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}"
+ % client.transport._host,
+ args[1],
+ )
+
+
+def test_update_table_rest_flattened_error(transport: str = "rest"):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_table(
+ metastore.UpdateTableRequest(),
+ table=metastore.Table(
+ hive_options=metastore.HiveTableOptions(
+ parameters={"key_value": "value_value"}
+ )
+ ),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+def test_update_table_rest_error():
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ metastore.RenameTableRequest,
+ dict,
+ ],
+)
+def test_rename_table_rest(request_type):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
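+ # The canned Table below is serialized to JSON and handed back through
+ # the mocked session, so the test round-trips the REST response parsing.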
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rename_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_rename_table_rest_required_fields(request_type=metastore.RenameTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["new_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["newName"] = "new_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "newName" in jsonified_request + assert jsonified_request["newName"] == "new_name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
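+ # RenameTable is exposed as a custom ":rename" POST method, so the
+ # stubbed transcode result uses "post" and carries the request as the
+ # body.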
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = metastore.Table.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.rename_table(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_rename_table_rest_unset_required_fields():
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.rename_table._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(())
+ & set(
+ (
+ "name",
+ "newName",
+ )
+ )
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_rename_table_rest_interceptors(null_interceptor):
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.MetastoreServiceRestInterceptor(),
+ )
+ client = MetastoreServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "post_rename_table"
+ ) as post, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "pre_rename_table"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = metastore.RenameTableRequest.pb(metastore.RenameTableRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = metastore.Table.to_json(metastore.Table())
+
+ request = metastore.RenameTableRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = metastore.Table()
+
+ client.rename_table(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_rename_table_rest_bad_request(
+ transport: str = "rest", request_type=metastore.RenameTableRequest
+):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rename_table(request) + + +def test_rename_table_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + new_name="new_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rename_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename" + % client.transport._host, + args[1], + ) + + +def test_rename_table_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_table( + metastore.RenameTableRequest(), + name="name_value", + new_name="new_name_value", + ) + + +def test_rename_table_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.GetTableRequest, + dict, + ], +) +def test_get_table_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = metastore.Table( + name="name_value", + type_=metastore.Table.Type.HIVE, + etag="etag_value", + hive_options=metastore.HiveTableOptions( + parameters={"key_value": "value_value"} + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Table) + assert response.name == "name_value" + assert response.type_ == metastore.Table.Type.HIVE + assert response.etag == "etag_value" + + +def test_get_table_rest_required_fields(request_type=metastore.GetTableRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
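+ # GetTable maps to an HTTP GET, which carries no request body; note
+ # that the stub below sets no transcode_result["body"], so every field
+ # must surface via query_params.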
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = metastore.Table.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.get_table(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_get_table_rest_unset_required_fields():
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.get_table._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_table_rest_interceptors(null_interceptor):
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.MetastoreServiceRestInterceptor(),
+ )
+ client = MetastoreServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "post_get_table"
+ ) as post, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "pre_get_table"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = metastore.GetTableRequest.pb(metastore.GetTableRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = metastore.Table.to_json(metastore.Table())
+
+ request = metastore.GetTableRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = metastore.Table()
+
+ client.get_table(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_get_table_rest_bad_request(
+ transport: str = "rest", request_type=metastore.GetTableRequest
+):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.get_table(request)
+
+
+def test_get_table_rest_flattened():
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Table() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Table.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" + % client.transport._host, + args[1], + ) + + +def test_get_table_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_table( + metastore.GetTableRequest(), + name="name_value", + ) + + +def test_get_table_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListTablesRequest, + dict, + ], +) +def test_list_tables_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListTablesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tables(request) + + # Establish that the response is the type that we expect. 
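+ # list_tables wraps the raw response in a ListTablesPager; deeper
+ # pagination behavior is exercised by test_list_tables_rest_pager below.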
+ assert isinstance(response, pagers.ListTablesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tables_rest_required_fields(request_type=metastore.ListTablesRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tables._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tables._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.ListTablesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = metastore.ListTablesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.list_tables(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_list_tables_rest_unset_required_fields():
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.list_tables._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(
+ (
+ "pageSize",
+ "pageToken",
+ "view",
+ )
+ )
+ & set(("parent",))
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_tables_rest_interceptors(null_interceptor):
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.MetastoreServiceRestInterceptor(),
+ )
+ client = MetastoreServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "post_list_tables"
+ ) as post, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "pre_list_tables"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = metastore.ListTablesRequest.pb(metastore.ListTablesRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = metastore.ListTablesResponse.to_json(
+ metastore.ListTablesResponse()
+ )
+
+ request = metastore.ListTablesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = metastore.ListTablesResponse()
+
+ client.list_tables(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_list_tables_rest_bad_request(
+ transport: str = "rest", request_type=metastore.ListTablesRequest
+):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tables(request) + + +def test_list_tables_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListTablesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tables(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" + % client.transport._host, + args[1], + ) + + +def test_list_tables_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tables( + metastore.ListTablesRequest(), + parent="parent_value", + ) + + +def test_list_tables_rest_pager(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
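+ # The pages are queued twice on req.side_effect because the test
+ # iterates the results once as items and once as pages; the final
+ # page's empty next_page_token ends each pass.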
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + metastore.Table(), + ], + next_page_token="abc", + ), + metastore.ListTablesResponse( + tables=[], + next_page_token="def", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + ], + next_page_token="ghi", + ), + metastore.ListTablesResponse( + tables=[ + metastore.Table(), + metastore.Table(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metastore.ListTablesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + pager = client.list_tables(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Table) for i in results) + + pages = list(client.list_tables(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CreateLockRequest, + dict, + ], +) +def test_create_lock_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request_init["lock"] = { + "table_id": "table_id_value", + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "type_": 1, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Lock( + name="name_value", + type_=metastore.Lock.Type.EXCLUSIVE, + state=metastore.Lock.State.WAITING, + table_id="table_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Lock.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_lock(request) + + # Establish that the response is the type that we expect. 
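+ # Beyond the type check, the enum-valued fields confirm that Lock.Type
+ # and Lock.State survive the JSON round trip.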
+ assert isinstance(response, metastore.Lock) + assert response.name == "name_value" + assert response.type_ == metastore.Lock.Type.EXCLUSIVE + assert response.state == metastore.Lock.State.WAITING + + +def test_create_lock_rest_required_fields(request_type=metastore.CreateLockRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_lock._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_lock._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Lock() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = metastore.Lock.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.create_lock(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_create_lock_rest_unset_required_fields():
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.create_lock._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(())
+ & set(
+ (
+ "parent",
+ "lock",
+ )
+ )
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_lock_rest_interceptors(null_interceptor):
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.MetastoreServiceRestInterceptor(),
+ )
+ client = MetastoreServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "post_create_lock"
+ ) as post, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "pre_create_lock"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = metastore.CreateLockRequest.pb(metastore.CreateLockRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = metastore.Lock.to_json(metastore.Lock())
+
+ request = metastore.CreateLockRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = metastore.Lock()
+
+ client.create_lock(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_create_lock_rest_bad_request(
+ transport: str = "rest", request_type=metastore.CreateLockRequest
+):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4"
+ }
+ request_init["lock"] = {
+ "table_id": "table_id_value",
+ "name": "name_value",
+ "create_time": {"seconds": 751, "nanos": 543},
+ "type_": 1,
+ "state": 1,
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_lock(request) + + +def test_create_lock_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Lock() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + lock=metastore.Lock(table_id="table_id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Lock.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_lock(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks" + % client.transport._host, + args[1], + ) + + +def test_create_lock_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_lock( + metastore.CreateLockRequest(), + parent="parent_value", + lock=metastore.Lock(table_id="table_id_value"), + ) + + +def test_create_lock_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.DeleteLockRequest, + dict, + ], +) +def test_delete_lock_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_lock(request) + + # Establish that the response is the type that we expect. 
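+ # DeleteLock yields no response payload, which the client surfaces as
+ # None; hence the empty-string JSON body above.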
+ assert response is None + + +def test_delete_lock_rest_required_fields(request_type=metastore.DeleteLockRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_lock._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_lock._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "delete",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.delete_lock(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_delete_lock_rest_unset_required_fields():
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.delete_lock._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_lock_rest_interceptors(null_interceptor):
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.MetastoreServiceRestInterceptor(),
+ )
+ client = MetastoreServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "pre_delete_lock"
+ ) as pre:
+ pre.assert_not_called()
+ pb_message = metastore.DeleteLockRequest.pb(metastore.DeleteLockRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+
+ request = metastore.DeleteLockRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.delete_lock(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+
+
+def test_delete_lock_rest_bad_request(
+ transport: str = "rest", request_type=metastore.DeleteLockRequest
+):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.delete_lock(request)
+
+
+def test_delete_lock_rest_flattened():
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
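+ # With no response payload to decode, this flattened call can only be
+ # verified through the URL that the transport was asked to hit.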
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_lock(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_lock_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_lock( + metastore.DeleteLockRequest(), + name="name_value", + ) + + +def test_delete_lock_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.CheckLockRequest, + dict, + ], +) +def test_check_lock_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Lock( + name="name_value", + type_=metastore.Lock.Type.EXCLUSIVE, + state=metastore.Lock.State.WAITING, + table_id="table_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Lock.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.check_lock(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metastore.Lock) + assert response.name == "name_value" + assert response.type_ == metastore.Lock.Type.EXCLUSIVE + assert response.state == metastore.Lock.State.WAITING + + +def test_check_lock_rest_required_fields(request_type=metastore.CheckLockRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_lock._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_lock._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.Lock() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
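+ # Whatever transcode yields, the transport appends the system query
+ # parameter $alt=json;enum-encoding=int, which the assertion at the end
+ # of this test pins down.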
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = metastore.Lock.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.check_lock(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_check_lock_rest_unset_required_fields():
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.check_lock._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_check_lock_rest_interceptors(null_interceptor):
+ transport = transports.MetastoreServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.MetastoreServiceRestInterceptor(),
+ )
+ client = MetastoreServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "post_check_lock"
+ ) as post, mock.patch.object(
+ transports.MetastoreServiceRestInterceptor, "pre_check_lock"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = metastore.CheckLockRequest.pb(metastore.CheckLockRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = metastore.Lock.to_json(metastore.Lock())
+
+ request = metastore.CheckLockRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = metastore.Lock()
+
+ client.check_lock(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_check_lock_rest_bad_request(
+ transport: str = "rest", request_type=metastore.CheckLockRequest
+):
+ client = MetastoreServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_lock(request) + + +def test_check_lock_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.Lock() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.Lock.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.check_lock(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}:check" + % client.transport._host, + args[1], + ) + + +def test_check_lock_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.check_lock( + metastore.CheckLockRequest(), + name="name_value", + ) + + +def test_check_lock_rest_error(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metastore.ListLocksRequest, + dict, + ], +) +def test_list_locks_rest(request_type): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListLocksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListLocksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_locks(request) + + # Establish that the response is the type that we expect. 
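+ # Lock listings are paged the same way as table listings; only the
+ # first page and its token are asserted here.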
+ assert isinstance(response, pagers.ListLocksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_locks_rest_required_fields(request_type=metastore.ListLocksRequest): + transport_class = transports.MetastoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_locks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_locks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metastore.ListLocksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
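+            # (transcode() normally matches a request against the method's
+            # http rule and splits it into uri, method, body and query
+            # params; returning a canned result keeps this test independent
+            # of the real http_options.)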
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = metastore.ListLocksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_locks_rest_unset_required_fields(): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_locks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_locks_rest_interceptors(null_interceptor): + transport = transports.MetastoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetastoreServiceRestInterceptor(), + ) + client = MetastoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_locks" + ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "pre_list_locks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metastore.ListLocksRequest.pb(metastore.ListLocksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metastore.ListLocksResponse.to_json( + metastore.ListLocksResponse() + ) + + request = metastore.ListLocksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metastore.ListLocksResponse() + + client.list_locks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_locks_rest_bad_request( + transport: str = "rest", request_type=metastore.ListLocksRequest +): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locks(request) + + +def test_list_locks_rest_flattened(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metastore.ListLocksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metastore.ListLocksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_locks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks" + % client.transport._host, + args[1], + ) + + +def test_list_locks_rest_flattened_error(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_locks( + metastore.ListLocksRequest(), + parent="parent_value", + ) + + +def test_list_locks_rest_pager(transport: str = "rest"): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
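+        # (each ListLocksResponse below is served as its own mocked http
+        # response; the pager keeps issuing requests while next_page_token
+        # is non-empty.)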
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + metastore.Lock(), + ], + next_page_token="abc", + ), + metastore.ListLocksResponse( + locks=[], + next_page_token="def", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + ], + next_page_token="ghi", + ), + metastore.ListLocksResponse( + locks=[ + metastore.Lock(), + metastore.Lock(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metastore.ListLocksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/catalogs/sample3/databases/sample4" + } + + pager = client.list_locks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metastore.Lock) for i in results) + + pages = list(client.list_locks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetastoreServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetastoreServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MetastoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetastoreServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + transports.MetastoreServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MetastoreServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetastoreServiceGrpcTransport, + ) + + +def test_metastore_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetastoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_metastore_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MetastoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
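+    # (MetastoreServiceTransport is the abstract base; the concrete gRPC
+    # and REST transports override each of these stubs with real calls.)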
+ methods = ( + "create_catalog", + "delete_catalog", + "get_catalog", + "list_catalogs", + "create_database", + "delete_database", + "update_database", + "get_database", + "list_databases", + "create_table", + "delete_table", + "update_table", + "rename_table", + "get_table", + "list_tables", + "create_lock", + "delete_lock", + "check_lock", + "list_locks", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_metastore_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetastoreServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_metastore_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery.biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetastoreServiceTransport() + adc.assert_called_once() + + +def test_metastore_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetastoreServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
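+    # (google.auth.default() is the Application Default Credentials lookup;
+    # mocking it keeps the test hermetic while still letting us assert the
+    # scopes and quota project the transport passes through.)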
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + transports.MetastoreServiceRestTransport, + ], +) +def test_metastore_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetastoreServiceGrpcTransport, grpc_helpers), + (transports.MetastoreServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_metastore_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "biglake.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=["1", "2"], + default_host="biglake.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
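+    # (the callback returns the client certificate and key, which the
+    # transport hands to grpc.ssl_channel_credentials to build the mTLS
+    # channel credentials.)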
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_metastore_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MetastoreServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_metastore_service_host_no_port(transport_name): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="biglake.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "biglake.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_metastore_service_host_with_port(transport_name): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="biglake.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "biglake.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://biglake.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_metastore_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MetastoreServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MetastoreServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_catalog._session + session2 = client2.transport.create_catalog._session + assert session1 != session2 + session1 = client1.transport.delete_catalog._session + session2 = client2.transport.delete_catalog._session + assert session1 != session2 + session1 = client1.transport.get_catalog._session + session2 = client2.transport.get_catalog._session + assert session1 != session2 + session1 = client1.transport.list_catalogs._session + session2 = client2.transport.list_catalogs._session + assert session1 != session2 + session1 = client1.transport.create_database._session + session2 = client2.transport.create_database._session + assert session1 != session2 + session1 = client1.transport.delete_database._session + session2 = client2.transport.delete_database._session + assert session1 != session2 + session1 = client1.transport.update_database._session + session2 = client2.transport.update_database._session + assert session1 != session2 + session1 = client1.transport.get_database._session + session2 = client2.transport.get_database._session + assert session1 != session2 + session1 = 
client1.transport.list_databases._session
+    session2 = client2.transport.list_databases._session
+    assert session1 != session2
+    session1 = client1.transport.create_table._session
+    session2 = client2.transport.create_table._session
+    assert session1 != session2
+    session1 = client1.transport.delete_table._session
+    session2 = client2.transport.delete_table._session
+    assert session1 != session2
+    session1 = client1.transport.update_table._session
+    session2 = client2.transport.update_table._session
+    assert session1 != session2
+    session1 = client1.transport.rename_table._session
+    session2 = client2.transport.rename_table._session
+    assert session1 != session2
+    session1 = client1.transport.get_table._session
+    session2 = client2.transport.get_table._session
+    assert session1 != session2
+    session1 = client1.transport.list_tables._session
+    session2 = client2.transport.list_tables._session
+    assert session1 != session2
+    session1 = client1.transport.create_lock._session
+    session2 = client2.transport.create_lock._session
+    assert session1 != session2
+    session1 = client1.transport.delete_lock._session
+    session2 = client2.transport.delete_lock._session
+    assert session1 != session2
+    session1 = client1.transport.check_lock._session
+    session2 = client2.transport.check_lock._session
+    assert session1 != session2
+    session1 = client1.transport.list_locks._session
+    session2 = client2.transport.list_locks._session
+    assert session1 != session2
+
+
+def test_metastore_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MetastoreServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_metastore_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MetastoreServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetastoreServiceGrpcTransport, + transports.MetastoreServiceGrpcAsyncIOTransport, + ], +) +def test_metastore_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_catalog_path(): + project = "squid" + location = "clam" + catalog = "whelk" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}".format( + project=project, + location=location, + catalog=catalog, + ) + actual = MetastoreServiceClient.catalog_path(project, location, catalog) + assert expected == actual + + +def test_parse_catalog_path(): + expected = { + "project": "octopus", + "location": "oyster", + "catalog": "nudibranch", + } + path = MetastoreServiceClient.catalog_path(**expected) + + # Check that the path construction is reversible. 
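+    # (parse_catalog_path matches the template with a regex and returns
+    # the captured segments, so build-then-parse must round-trip.)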
+ actual = MetastoreServiceClient.parse_catalog_path(path) + assert expected == actual + + +def test_database_path(): + project = "cuttlefish" + location = "mussel" + catalog = "winkle" + database = "nautilus" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format( + project=project, + location=location, + catalog=catalog, + database=database, + ) + actual = MetastoreServiceClient.database_path(project, location, catalog, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "scallop", + "location": "abalone", + "catalog": "squid", + "database": "clam", + } + path = MetastoreServiceClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_database_path(path) + assert expected == actual + + +def test_lock_path(): + project = "whelk" + location = "octopus" + catalog = "oyster" + database = "nudibranch" + lock = "cuttlefish" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/locks/{lock}".format( + project=project, + location=location, + catalog=catalog, + database=database, + lock=lock, + ) + actual = MetastoreServiceClient.lock_path( + project, location, catalog, database, lock + ) + assert expected == actual + + +def test_parse_lock_path(): + expected = { + "project": "mussel", + "location": "winkle", + "catalog": "nautilus", + "database": "scallop", + "lock": "abalone", + } + path = MetastoreServiceClient.lock_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_lock_path(path) + assert expected == actual + + +def test_table_path(): + project = "squid" + location = "clam" + catalog = "whelk" + database = "octopus" + table = "oyster" + expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format( + project=project, + location=location, + catalog=catalog, + database=database, + table=table, + ) + actual = MetastoreServiceClient.table_path( + project, location, catalog, database, table + ) + assert expected == actual + + +def test_parse_table_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "catalog": "mussel", + "database": "winkle", + "table": "nautilus", + } + path = MetastoreServiceClient.table_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_table_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MetastoreServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = MetastoreServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MetastoreServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = MetastoreServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetastoreServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MetastoreServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = MetastoreServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = MetastoreServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = MetastoreServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MetastoreServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = MetastoreServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MetastoreServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MetastoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MetastoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MetastoreServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MetastoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MetastoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying 
transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport), + (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-confidentialcomputing/.OwlBot.yaml b/packages/google-cloud-confidentialcomputing/.OwlBot.yaml new file mode 100644 index 000000000000..5239b0de8d33 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/.OwlBot.yaml @@ -0,0 +1,20 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-preserve-regex: + - /owl-bot-staging/google-cloud-confidentialcomputing/v1alpha + +deep-copy-regex: + - source: /google/cloud/confidentialcomputing/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-confidentialcomputing/$1 diff --git a/packages/google-cloud-confidentialcomputing/.coveragerc b/packages/google-cloud-confidentialcomputing/.coveragerc new file mode 100644 index 000000000000..ecca1f30bedc --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/confidentialcomputing/__init__.py + google/cloud/confidentialcomputing/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-confidentialcomputing/.flake8 b/packages/google-cloud-confidentialcomputing/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-confidentialcomputing/.gitignore b/packages/google-cloud-confidentialcomputing/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-confidentialcomputing/.repo-metadata.json b/packages/google-cloud-confidentialcomputing/.repo-metadata.json new file mode 100644 index 000000000000..d4e530cf22a8 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "confidentialcomputing", + "name_pretty": "Confidential Computing API", + "api_description": "Protect data in-use with Confidential VMs, Confidential GKE, Confidential Dataproc, and Confidential Space.", + "product_documentation": "https://cloud.google.com/confidential-computing", + "client_documentation": "https://cloud.google.com/python/docs/reference/confidentialcomputing/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1166820", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-confidentialcomputing", + "api_id": "confidentialcomputing.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "confidentialcomputing" +} diff --git a/packages/google-cloud-confidentialcomputing/CHANGELOG.md b/packages/google-cloud-confidentialcomputing/CHANGELOG.md new file mode 100644 index 000000000000..ed7991c99044 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2023-04-19) + + +### Features + +* add initial files for google.cloud.confidentialcomputing.v1 ([#11102](https://github.com/googleapis/google-cloud-python/issues/11102)) ([816cd75](https://github.com/googleapis/google-cloud-python/commit/816cd752bd8a354d82c19ec75dbb5f3056e2d480)) + +## Changelog diff --git a/packages/google-cloud-confidentialcomputing/CODE_OF_CONDUCT.md b/packages/google-cloud-confidentialcomputing/CODE_OF_CONDUCT.md new file mode 100644 index 
000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. 
It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-confidentialcomputing/CONTRIBUTING.rst b/packages/google-cloud-confidentialcomputing/CONTRIBUTING.rst new file mode 100644 index 000000000000..48e2e220fdc1 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. 
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+    $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+    $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+    export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+    export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+    # Run all system tests
+    $ nox -s system
+
+    # Run a single system test
+    $ nox -s system-<python version> -- -k <name of test>
+
+
+  .. note::
+
+    System tests are only configured to run under Python.
+    For expediency, we do not run them in older versions of Python 3.
+
+    This alone will not run the tests. You'll need to change some local
+    auth settings and change some configuration in your project to
+    run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-confidentialcomputing
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+We also explicitly decided to support Python 3 beginning with version 3.7.
+Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+  works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
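+
+For example (a suggestion, not a requirement of this repository), a
+downstream project may want to pin a zero-major package to an exact
+release in its requirements file, so that breaking changes are adopted
+deliberately rather than picked up implicitly::
+
+    # 0.y.z: any release may break the public API, so pin exactly.
+    google-cloud-confidentialcomputing==0.1.0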
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-confidentialcomputing/LICENSE b/packages/google-cloud-confidentialcomputing/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-confidentialcomputing/MANIFEST.in b/packages/google-cloud-confidentialcomputing/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-confidentialcomputing/README.rst b/packages/google-cloud-confidentialcomputing/README.rst new file mode 100644 index 000000000000..e493727c7975 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/README.rst @@ -0,0 +1,107 @@ +Python Client for Confidential Computing API +============================================ + +|preview| |pypi| |versions| + +`Confidential Computing API`_: Protect data in-use with Confidential VMs, Confidential GKE, Confidential Dataproc, and Confidential Space. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-confidentialcomputing.svg + :target: https://pypi.org/project/google-cloud-confidentialcomputing/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-confidentialcomputing.svg + :target: https://pypi.org/project/google-cloud-confidentialcomputing/ +.. _Confidential Computing API: https://cloud.google.com/confidential-computing +.. 
_Client Library Documentation: https://cloud.google.com/python/docs/reference/confidentialcomputing/latest
+.. _Product Documentation: https://cloud.google.com/confidential-computing
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Confidential Computing API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Confidential Computing API.: https://cloud.google.com/confidential-computing
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the ``samples/`` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-confidentialcomputing
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-confidentialcomputing
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Confidential Computing API
+  to see other available methods on the client.
+- Read the `Confidential Computing API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Confidential Computing API Product documentation: https://cloud.google.com/confidential-computing
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
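+
+Quick usage sketch
+~~~~~~~~~~~~~~~~~~
+
+The following minimal example mirrors the generated ``CreateChallenge`` sample
+that appears later in this change; the parent resource name is a placeholder
+you must replace with your own project and location:
+
+.. code-block:: python
+
+    from google.cloud import confidentialcomputing_v1
+
+    # Create a client (credentials are discovered from the environment).
+    client = confidentialcomputing_v1.ConfidentialComputingClient()
+
+    # Ask the service for a fresh Challenge in the given location.
+    request = confidentialcomputing_v1.CreateChallengeRequest(
+        parent="projects/<your-project>/locations/<your-location>",
+    )
+    response = client.create_challenge(request=request)
+    print(response)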
diff --git a/packages/google-cloud-confidentialcomputing/SECURITY.md b/packages/google-cloud-confidentialcomputing/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/SECURITY.md @@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/packages/google-cloud-confidentialcomputing/docs/CHANGELOG.md b/packages/google-cloud-confidentialcomputing/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/CHANGELOG.md @@ -0,0 +1 @@
+../CHANGELOG.md \ No newline at end of file
diff --git a/packages/google-cloud-confidentialcomputing/docs/README.rst b/packages/google-cloud-confidentialcomputing/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/README.rst @@ -0,0 +1 @@
+../README.rst \ No newline at end of file
diff --git a/packages/google-cloud-confidentialcomputing/docs/_static/custom.css b/packages/google-cloud-confidentialcomputing/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/_static/custom.css @@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/packages/google-cloud-confidentialcomputing/docs/_templates/layout.html b/packages/google-cloud-confidentialcomputing/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/_templates/layout.html @@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+        {%- block relbar_top %}
+          {%- if theme_show_relbar_top|tobool %}
+            <div class="related top">
+              &#160;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+        <div class="body" role="main">
+          <div class="admonition" id="python2-eol">
+            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+            Library versions released prior to that date will continue to be available. For more information please
+            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          </div>
+          {% block body %} {% endblock %}
+        </div>
+
+        {%- block relbar_bottom %}
+          {%- if theme_show_relbar_bottom|tobool %}
+            <div class="related bottom">
+              &#160;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-confidentialcomputing/docs/conf.py b/packages/google-cloud-confidentialcomputing/docs/conf.py new file mode 100644 index 000000000000..a79da05bf9a5 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-confidentialcomputing documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-confidentialcomputing" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-confidentialcomputing", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-confidentialcomputing-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-confidentialcomputing.tex", + "google-cloud-confidentialcomputing Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-confidentialcomputing", + "google-cloud-confidentialcomputing Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-confidentialcomputing", + "google-cloud-confidentialcomputing Documentation", + author, + "google-cloud-confidentialcomputing", + "google-cloud-confidentialcomputing Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/confidential_computing.rst b/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/confidential_computing.rst new file mode 100644 index 000000000000..06d7e01926a9 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/confidential_computing.rst @@ -0,0 +1,6 @@ +ConfidentialComputing +--------------------------------------- + +.. automodule:: google.cloud.confidentialcomputing_v1.services.confidential_computing + :members: + :inherited-members: diff --git a/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/services.rst b/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/services.rst new file mode 100644 index 000000000000..0e44260b2eae --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Confidentialcomputing v1 API +====================================================== +.. 
+.. toctree::
+    :maxdepth: 2
+
+    confidential_computing
diff --git a/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/types.rst b/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/types.rst new file mode 100644 index 000000000000..ae5970c73d34 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/confidentialcomputing_v1/types.rst @@ -0,0 +1,6 @@
+Types for Google Cloud Confidentialcomputing v1 API
+===================================================
+
+.. automodule:: google.cloud.confidentialcomputing_v1.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-confidentialcomputing/docs/index.rst b/packages/google-cloud-confidentialcomputing/docs/index.rst new file mode 100644 index 000000000000..493043df9324 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/index.rst @@ -0,0 +1,23 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    confidentialcomputing_v1/services
+    confidentialcomputing_v1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-confidentialcomputing`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-confidentialcomputing/docs/multiprocessing.rst b/packages/google-cloud-confidentialcomputing/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/docs/multiprocessing.rst @@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
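+
+   A minimal sketch of that guidance (the resource name is a placeholder you
+   must replace):
+
+   .. code-block:: python
+
+      import multiprocessing
+
+      from google.cloud import confidentialcomputing_v1
+
+      def worker(parent: str) -> None:
+          # Create the client *inside* the child process, i.e. after the
+          # fork performed by multiprocessing, so no gRPC state is shared
+          # between parent and child.
+          client = confidentialcomputing_v1.ConfidentialComputingClient()
+          request = confidentialcomputing_v1.CreateChallengeRequest(parent=parent)
+          print(client.create_challenge(request=request))
+
+      if __name__ == "__main__":
+          process = multiprocessing.Process(
+              target=worker,
+              args=("projects/<your-project>/locations/<your-location>",),
+          )
+          process.start()
+          process.join()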
diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/__init__.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/__init__.py new file mode 100644 index 000000000000..e1037c794f76 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/__init__.py @@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.cloud.confidentialcomputing import gapic_version as package_version
+
+__version__ = package_version.__version__
+
+
+from google.cloud.confidentialcomputing_v1.services.confidential_computing.async_client import (
+    ConfidentialComputingAsyncClient,
+)
+from google.cloud.confidentialcomputing_v1.services.confidential_computing.client import (
+    ConfidentialComputingClient,
+)
+from google.cloud.confidentialcomputing_v1.types.service import (
+    Challenge,
+    CreateChallengeRequest,
+    GcpCredentials,
+    TpmAttestation,
+    VerifyAttestationRequest,
+    VerifyAttestationResponse,
+)
+
+__all__ = (
+    "ConfidentialComputingClient",
+    "ConfidentialComputingAsyncClient",
+    "Challenge",
+    "CreateChallengeRequest",
+    "GcpCredentials",
+    "TpmAttestation",
+    "VerifyAttestationRequest",
+    "VerifyAttestationResponse",
+)
diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py new file mode 100644 index 000000000000..aa80f74315ce --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py @@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+__version__ = "0.1.0"  # {x-release-please-version}
diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/py.typed b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/py.typed new file mode 100644 index 000000000000..5e085fa19b06 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/py.typed @@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-confidentialcomputing package uses inline types.
diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/__init__.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/__init__.py new file mode 100644 index 000000000000..d0cc1e1a017b --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/__init__.py @@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from google.cloud.confidentialcomputing_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.confidential_computing import ( + ConfidentialComputingAsyncClient, + ConfidentialComputingClient, +) +from .types.service import ( + Challenge, + CreateChallengeRequest, + GcpCredentials, + TpmAttestation, + VerifyAttestationRequest, + VerifyAttestationResponse, +) + +__all__ = ( + "ConfidentialComputingAsyncClient", + "Challenge", + "ConfidentialComputingClient", + "CreateChallengeRequest", + "GcpCredentials", + "TpmAttestation", + "VerifyAttestationRequest", + "VerifyAttestationResponse", +) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_metadata.json b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_metadata.json new file mode 100644 index 000000000000..1f03d82e18a7 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_metadata.json @@ -0,0 +1,58 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.confidentialcomputing_v1", + "protoPackage": "google.cloud.confidentialcomputing.v1", + "schema": "1.0", + "services": { + "ConfidentialComputing": { + "clients": { + "grpc": { + "libraryClient": "ConfidentialComputingClient", + "rpcs": { + "CreateChallenge": { + "methods": [ + "create_challenge" + ] + }, + "VerifyAttestation": { + "methods": [ + "verify_attestation" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConfidentialComputingAsyncClient", + "rpcs": { + "CreateChallenge": { + "methods": [ + "create_challenge" + ] + }, + "VerifyAttestation": { + "methods": [ + "verify_attestation" + ] + } + } + }, + "rest": { + "libraryClient": "ConfidentialComputingClient", + "rpcs": { + "CreateChallenge": { + "methods": [ + "create_challenge" + ] + }, + "VerifyAttestation": { + "methods": [ + "verify_attestation" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py new file mode 100644 index 000000000000..aa80f74315ce --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/py.typed b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/py.typed new file mode 100644 index 000000000000..5e085fa19b06 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. 
+# The google-cloud-confidentialcomputing package uses inline types. diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/__init__.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/__init__.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/__init__.py new file mode 100644 index 000000000000..14c39e306d13 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ConfidentialComputingAsyncClient +from .client import ConfidentialComputingClient + +__all__ = ( + "ConfidentialComputingClient", + "ConfidentialComputingAsyncClient", +) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py new file mode 100644 index 000000000000..ec8c18d0b8b4 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py @@ -0,0 +1,563 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.confidentialcomputing_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.confidentialcomputing_v1.types import service + +from .client import ConfidentialComputingClient +from .transports.base import DEFAULT_CLIENT_INFO, ConfidentialComputingTransport +from .transports.grpc_asyncio import ConfidentialComputingGrpcAsyncIOTransport + + +class ConfidentialComputingAsyncClient: + """Service describing handlers for resources""" + + _client: ConfidentialComputingClient + + DEFAULT_ENDPOINT = ConfidentialComputingClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConfidentialComputingClient.DEFAULT_MTLS_ENDPOINT + + challenge_path = staticmethod(ConfidentialComputingClient.challenge_path) + parse_challenge_path = staticmethod( + ConfidentialComputingClient.parse_challenge_path + ) + common_billing_account_path = staticmethod( + ConfidentialComputingClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ConfidentialComputingClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ConfidentialComputingClient.common_folder_path) + parse_common_folder_path = staticmethod( + ConfidentialComputingClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ConfidentialComputingClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ConfidentialComputingClient.parse_common_organization_path + ) + common_project_path = staticmethod(ConfidentialComputingClient.common_project_path) + parse_common_project_path = staticmethod( + ConfidentialComputingClient.parse_common_project_path + ) + common_location_path = staticmethod( + ConfidentialComputingClient.common_location_path + ) + parse_common_location_path = staticmethod( + ConfidentialComputingClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfidentialComputingAsyncClient: The constructed client. + """ + return ConfidentialComputingClient.from_service_account_info.__func__(ConfidentialComputingAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+                kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ConfidentialComputingAsyncClient: The constructed client.
+        """
+        return ConfidentialComputingClient.from_service_account_file.__func__(ConfidentialComputingAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return ConfidentialComputingClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> ConfidentialComputingTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ConfidentialComputingTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(ConfidentialComputingClient).get_transport_class,
+        type(ConfidentialComputingClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, ConfidentialComputingTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the confidential computing client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.ConfidentialComputingTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ConfidentialComputingClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_challenge( + self, + request: Optional[Union[service.CreateChallengeRequest, dict]] = None, + *, + parent: Optional[str] = None, + challenge: Optional[service.Challenge] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Challenge: + r"""Creates a new Challenge in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import confidentialcomputing_v1 + + async def sample_create_challenge(): + # Create a client + client = confidentialcomputing_v1.ConfidentialComputingAsyncClient() + + # Initialize request argument(s) + request = confidentialcomputing_v1.CreateChallengeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_challenge(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.confidentialcomputing_v1.types.CreateChallengeRequest, dict]]): + The request object. Message for creating a Challenge + parent (:class:`str`): + Required. The resource name of the location where the + Challenge will be used, in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + challenge (:class:`google.cloud.confidentialcomputing_v1.types.Challenge`): + Required. The Challenge to be + created. Currently this field can be + empty as all the Challenge fields are + set by the server. + + This corresponds to the ``challenge`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+        Returns:
+            google.cloud.confidentialcomputing_v1.types.Challenge:
+                A Challenge from the server used to
+                guarantee freshness of attestations
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, challenge])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.CreateChallengeRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if challenge is not None:
+            request.challenge = challenge
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_challenge,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def verify_attestation(
+        self,
+        request: Optional[Union[service.VerifyAttestationRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> service.VerifyAttestationResponse:
+        r"""Verifies the provided attestation info, returning a
+        signed OIDC token.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import confidentialcomputing_v1
+
+            async def sample_verify_attestation():
+                # Create a client
+                client = confidentialcomputing_v1.ConfidentialComputingAsyncClient()
+
+                # Initialize request argument(s)
+                request = confidentialcomputing_v1.VerifyAttestationRequest(
+                    challenge="challenge_value",
+                )
+
+                # Make the request
+                response = await client.verify_attestation(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.confidentialcomputing_v1.types.VerifyAttestationRequest, dict]]):
+                The request object. A request for an OIDC token,
+                providing all the necessary information
+                needed for this service to verify the
+                platform state of the requestor.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.cloud.confidentialcomputing_v1.types.VerifyAttestationResponse: + A response once an attestation has + been successfully verified, containing a + signed OIDC token. + + """ + # Create or coerce a protobuf request object. + request = service.VerifyAttestationRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.verify_attestation, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("challenge", request.challenge),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConfidentialComputingAsyncClient",) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py new file mode 100644 index 000000000000..aa08b54b6973 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py @@ -0,0 +1,777 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
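+#
+# Illustrative sketch (commented out; not part of the generated module): the
+# typical attestation flow pairs the two RPCs defined below. A Challenge is
+# created first to obtain a server-issued freshness nonce, and evidence bound
+# to it is then exchanged for a signed OIDC token. The project and location
+# values are placeholders, and the attestation evidence itself is omitted.
+#
+#     from google.cloud import confidentialcomputing_v1
+#
+#     client = confidentialcomputing_v1.ConfidentialComputingClient()
+#     challenge = client.create_challenge(
+#         request=confidentialcomputing_v1.CreateChallengeRequest(
+#             parent="projects/my-project/locations/us-central1",
+#         )
+#     )
+#     # The evidence sent for verification must reference the challenge
+#     # to prove freshness.
+#     response = client.verify_attestation(
+#         request=confidentialcomputing_v1.VerifyAttestationRequest(
+#             challenge=challenge.name,
+#         )
+#     )
+#     print(response)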
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.confidentialcomputing_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.cloud.location import locations_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+from google.cloud.confidentialcomputing_v1.types import service
+
+from .transports.base import DEFAULT_CLIENT_INFO, ConfidentialComputingTransport
+from .transports.grpc import ConfidentialComputingGrpcTransport
+from .transports.grpc_asyncio import ConfidentialComputingGrpcAsyncIOTransport
+from .transports.rest import ConfidentialComputingRestTransport
+
+
+class ConfidentialComputingClientMeta(type):
+    """Metaclass for the ConfidentialComputing client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[ConfidentialComputingTransport]]
+    _transport_registry["grpc"] = ConfidentialComputingGrpcTransport
+    _transport_registry["grpc_asyncio"] = ConfidentialComputingGrpcAsyncIOTransport
+    _transport_registry["rest"] = ConfidentialComputingRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[ConfidentialComputingTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ConfidentialComputingClient(metaclass=ConfidentialComputingClientMeta):
+    """Service describing handlers for resources"""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "confidentialcomputing.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfidentialComputingClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfidentialComputingClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfidentialComputingTransport: + """Returns the transport used by the client instance. + + Returns: + ConfidentialComputingTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def challenge_path(
+        project: str,
+        location: str,
+        uuid: str,
+    ) -> str:
+        """Returns a fully-qualified challenge string."""
+        return "projects/{project}/locations/{location}/challenges/{uuid}".format(
+            project=project,
+            location=location,
+            uuid=uuid,
+        )
+
+    @staticmethod
+    def parse_challenge_path(path: str) -> Dict[str, str]:
+        """Parses a challenge path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/challenges/(?P<uuid>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, ConfidentialComputingTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the confidential computing client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ConfidentialComputingTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConfidentialComputingTransport): + # transport is a ConfidentialComputingTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_challenge( + self, + request: Optional[Union[service.CreateChallengeRequest, dict]] = None, + *, + parent: Optional[str] = None, + challenge: Optional[service.Challenge] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Challenge: + r"""Creates a new Challenge in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import confidentialcomputing_v1 + + def sample_create_challenge(): + # Create a client + client = confidentialcomputing_v1.ConfidentialComputingClient() + + # Initialize request argument(s) + request = confidentialcomputing_v1.CreateChallengeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_challenge(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.confidentialcomputing_v1.types.CreateChallengeRequest, dict]): + The request object. Message for creating a Challenge + parent (str): + Required. The resource name of the location where the + Challenge will be used, in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + challenge (google.cloud.confidentialcomputing_v1.types.Challenge): + Required. The Challenge to be + created. Currently this field can be + empty as all the Challenge fields are + set by the server. + + This corresponds to the ``challenge`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.confidentialcomputing_v1.types.Challenge: + A Challenge from the server used to + guarantee freshness of attestations + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, challenge]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateChallengeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateChallengeRequest): + request = service.CreateChallengeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if challenge is not None: + request.challenge = challenge + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_challenge] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def verify_attestation(
+        self,
+        request: Optional[Union[service.VerifyAttestationRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> service.VerifyAttestationResponse:
+        r"""Verifies the provided attestation info, returning a
+        signed OIDC token.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import confidentialcomputing_v1
+
+            def sample_verify_attestation():
+                # Create a client
+                client = confidentialcomputing_v1.ConfidentialComputingClient()
+
+                # Initialize request argument(s)
+                request = confidentialcomputing_v1.VerifyAttestationRequest(
+                    challenge="challenge_value",
+                )
+
+                # Make the request
+                response = client.verify_attestation(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.confidentialcomputing_v1.types.VerifyAttestationRequest, dict]):
+                The request object. A request for an OIDC token,
+                providing all the necessary information
+                needed for this service to verify the
+                platform state of the requestor.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.confidentialcomputing_v1.types.VerifyAttestationResponse:
+                A response once an attestation has
+                been successfully verified, containing a
+                signed OIDC token.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a service.VerifyAttestationRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, service.VerifyAttestationRequest):
+            request = service.VerifyAttestationRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.verify_attestation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("challenge", request.challenge),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "ConfidentialComputingClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+ """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConfidentialComputingClient",) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/__init__.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/__init__.py new file mode 100644 index 000000000000..276ffaa964db --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConfidentialComputingTransport +from .grpc import ConfidentialComputingGrpcTransport +from .grpc_asyncio import ConfidentialComputingGrpcAsyncIOTransport +from .rest import ( + ConfidentialComputingRestInterceptor, + ConfidentialComputingRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[ConfidentialComputingTransport]] +_transport_registry["grpc"] = ConfidentialComputingGrpcTransport +_transport_registry["grpc_asyncio"] = ConfidentialComputingGrpcAsyncIOTransport +_transport_registry["rest"] = ConfidentialComputingRestTransport + +__all__ = ( + "ConfidentialComputingTransport", + "ConfidentialComputingGrpcTransport", + "ConfidentialComputingGrpcAsyncIOTransport", + "ConfidentialComputingRestTransport", + "ConfidentialComputingRestInterceptor", +) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/base.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/base.py new file mode 100644 index 000000000000..5a141970bb64 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/base.py @@ -0,0 +1,212 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
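+#
+# Illustrative sketch (commented out; not part of the generated module): the
+# concrete transports implementing this abstract base are selected through the
+# client metaclass, whose registry maps a label to a class and falls back to
+# the first registered entry ("grpc") when no label is given. For example:
+#
+#     from google.cloud.confidentialcomputing_v1.services.confidential_computing import (
+#         ConfidentialComputingClient,
+#     )
+#
+#     rest_cls = ConfidentialComputingClient.get_transport_class("rest")
+#     default_cls = ConfidentialComputingClient.get_transport_class()  # gRPC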
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.confidentialcomputing_v1 import gapic_version as package_version
+from google.cloud.confidentialcomputing_v1.types import service
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+class ConfidentialComputingTransport(abc.ABC):
+    """Abstract transport class for ConfidentialComputing."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    DEFAULT_HOST: str = "confidentialcomputing.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_challenge: gapic_v1.method.wrap_method( + self.create_challenge, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.verify_attestation: gapic_v1.method.wrap_method( + self.verify_attestation, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_challenge( + self, + ) -> Callable[ + [service.CreateChallengeRequest], + Union[service.Challenge, Awaitable[service.Challenge]], + ]: + raise NotImplementedError() + + @property + def verify_attestation( + self, + ) -> Callable[ + [service.VerifyAttestationRequest], + Union[ + service.VerifyAttestationResponse, + Awaitable[service.VerifyAttestationResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ConfidentialComputingTransport",) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/grpc.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/grpc.py new file mode 100644 index 000000000000..48b1dbba1094 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/grpc.py @@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
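+#
+# Illustrative sketch (commented out; not part of the generated module),
+# assuming application default credentials are available: a caller-owned
+# channel can be injected into the transport below, in which case credential
+# arguments are ignored and the caller manages the channel's lifecycle.
+#
+#     from google.cloud.confidentialcomputing_v1.services.confidential_computing import (
+#         ConfidentialComputingClient,
+#     )
+#     from google.cloud.confidentialcomputing_v1.services.confidential_computing.transports.grpc import (
+#         ConfidentialComputingGrpcTransport,
+#     )
+#
+#     channel = ConfidentialComputingGrpcTransport.create_channel(
+#         "confidentialcomputing.googleapis.com",
+#     )
+#     transport = ConfidentialComputingGrpcTransport(channel=channel)
+#     client = ConfidentialComputingClient(transport=transport)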
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.confidentialcomputing_v1.types import service
+
+from .base import DEFAULT_CLIENT_INFO, ConfidentialComputingTransport
+
+
+class ConfidentialComputingGrpcTransport(ConfidentialComputingTransport):
+    """gRPC backend transport for ConfidentialComputing.
+
+    Service describing handlers for resources
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "confidentialcomputing.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "confidentialcomputing.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def create_challenge(
+        self,
+    ) -> Callable[[service.CreateChallengeRequest], service.Challenge]:
+        r"""Return a callable for the create challenge method over gRPC.
+
+        Creates a new Challenge in a given project and
+        location.
+
+        Returns:
+            Callable[[~.CreateChallengeRequest],
+                    ~.Challenge]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_challenge" not in self._stubs:
+            self._stubs["create_challenge"] = self.grpc_channel.unary_unary(
+                "/google.cloud.confidentialcomputing.v1.ConfidentialComputing/CreateChallenge",
+                request_serializer=service.CreateChallengeRequest.serialize,
+                response_deserializer=service.Challenge.deserialize,
+            )
+        return self._stubs["create_challenge"]
+
+    @property
+    def verify_attestation(
+        self,
+    ) -> Callable[
+        [service.VerifyAttestationRequest], service.VerifyAttestationResponse
+    ]:
+        r"""Return a callable for the verify attestation method over gRPC.
+
+        Verifies the provided attestation info, returning a
+        signed OIDC token.
+
+        Returns:
+            Callable[[~.VerifyAttestationRequest],
+                    ~.VerifyAttestationResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "verify_attestation" not in self._stubs:
+            self._stubs["verify_attestation"] = self.grpc_channel.unary_unary(
+                "/google.cloud.confidentialcomputing.v1.ConfidentialComputing/VerifyAttestation",
+                request_serializer=service.VerifyAttestationRequest.serialize,
+                response_deserializer=service.VerifyAttestationResponse.deserialize,
+            )
+        return self._stubs["verify_attestation"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("ConfidentialComputingGrpcTransport",)
diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/grpc_asyncio.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..3f44d873a7b3
--- /dev/null
+++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/grpc_asyncio.py
@@ -0,0 +1,331 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.confidentialcomputing_v1.types import service
+
+from .base import DEFAULT_CLIENT_INFO, ConfidentialComputingTransport
+from .grpc import ConfidentialComputingGrpcTransport
+
+
+class ConfidentialComputingGrpcAsyncIOTransport(ConfidentialComputingTransport):
+    """gRPC AsyncIO backend transport for ConfidentialComputing.
+
+    Service describing handlers for resources
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "confidentialcomputing.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "confidentialcomputing.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_challenge( + self, + ) -> Callable[[service.CreateChallengeRequest], Awaitable[service.Challenge]]: + r"""Return a callable for the create challenge method over gRPC. + + Creates a new Challenge in a given project and + location. + + Returns: + Callable[[~.CreateChallengeRequest], + Awaitable[~.Challenge]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_challenge" not in self._stubs: + self._stubs["create_challenge"] = self.grpc_channel.unary_unary( + "/google.cloud.confidentialcomputing.v1.ConfidentialComputing/CreateChallenge", + request_serializer=service.CreateChallengeRequest.serialize, + response_deserializer=service.Challenge.deserialize, + ) + return self._stubs["create_challenge"] + + @property + def verify_attestation( + self, + ) -> Callable[ + [service.VerifyAttestationRequest], Awaitable[service.VerifyAttestationResponse] + ]: + r"""Return a callable for the verify attestation method over gRPC. + + Verifies the provided attestation info, returning a + signed OIDC token. + + Returns: + Callable[[~.VerifyAttestationRequest], + Awaitable[~.VerifyAttestationResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "verify_attestation" not in self._stubs: + self._stubs["verify_attestation"] = self.grpc_channel.unary_unary( + "/google.cloud.confidentialcomputing.v1.ConfidentialComputing/VerifyAttestation", + request_serializer=service.VerifyAttestationRequest.serialize, + response_deserializer=service.VerifyAttestationResponse.deserialize, + ) + return self._stubs["verify_attestation"] + + def close(self): + return self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ConfidentialComputingGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/rest.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/rest.py new file mode 100644 index 000000000000..a4e75cf80825 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/rest.py @@ -0,0 +1,638 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import dataclasses
+import json  # type: ignore
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.protobuf import json_format
+import grpc  # type: ignore
+from requests import __version__ as requests_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+
+from google.cloud.confidentialcomputing_v1.types import service
+
+from .base import ConfidentialComputingTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=requests_version,
+)
+
+
+class ConfidentialComputingRestInterceptor:
+    """Interceptor for ConfidentialComputing.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the ConfidentialComputingRestTransport.
+
+    .. code-block:: python
+        class MyCustomConfidentialComputingInterceptor(ConfidentialComputingRestInterceptor):
+            def pre_create_challenge(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_challenge(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_verify_attestation(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_verify_attestation(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+    transport = ConfidentialComputingRestTransport(interceptor=MyCustomConfidentialComputingInterceptor())
+    client = ConfidentialComputingClient(transport=transport)
+
+
+    """
+
+    def pre_create_challenge(
+        self,
+        request: service.CreateChallengeRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[service.CreateChallengeRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_challenge
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the ConfidentialComputing server.
+        """
+        return request, metadata
+
+    def post_create_challenge(self, response: service.Challenge) -> service.Challenge:
+        """Post-rpc interceptor for create_challenge
+
+        Override in a subclass to manipulate the response
+        after it is returned by the ConfidentialComputing server but before
+        it is returned to user code.
+ """ + return response + + def pre_verify_attestation( + self, + request: service.VerifyAttestationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.VerifyAttestationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for verify_attestation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfidentialComputing server. + """ + return request, metadata + + def post_verify_attestation( + self, response: service.VerifyAttestationResponse + ) -> service.VerifyAttestationResponse: + """Post-rpc interceptor for verify_attestation + + Override in a subclass to manipulate the response + after it is returned by the ConfidentialComputing server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfidentialComputing server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the ConfidentialComputing server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfidentialComputing server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the ConfidentialComputing server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ConfidentialComputingRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ConfidentialComputingRestInterceptor + + +class ConfidentialComputingRestTransport(ConfidentialComputingTransport): + """REST backend transport for ConfidentialComputing. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "confidentialcomputing.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ConfidentialComputingRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ConfidentialComputingRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateChallenge(ConfidentialComputingRestStub):
+        def __hash__(self):
+            return hash("CreateChallenge")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.CreateChallengeRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> service.Challenge:
+            r"""Call the create challenge method over HTTP.
+
+            Args:
+                request (~.service.CreateChallengeRequest):
+                    The request object. Message for creating a Challenge
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.service.Challenge:
+                    A Challenge from the server used to
+                guarantee freshness of attestations
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*}/challenges",
+                    "body": "challenge",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_challenge(
+                request, metadata
+            )
+            pb_request = service.CreateChallengeRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = service.Challenge()
+            pb_resp = service.Challenge.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_challenge(resp)
+            return resp
+
+    class _VerifyAttestation(ConfidentialComputingRestStub):
+        def __hash__(self):
+            return hash("VerifyAttestation")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: service.VerifyAttestationRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> service.VerifyAttestationResponse:
+            r"""Call the verify attestation method over HTTP.
+
+            Args:
+                request (~.service.VerifyAttestationRequest):
+                    The request object. A request for an OIDC token,
+                providing all the necessary information
+                needed for this service to verify the
+                platform state of the requestor.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.service.VerifyAttestationResponse:
+                    A response once an attestation has
+                been successfully verified, containing a
+                signed OIDC token.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{challenge=projects/*/locations/*/challenges/*}:verifyAttestation", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_verify_attestation( + request, metadata + ) + pb_request = service.VerifyAttestationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.VerifyAttestationResponse() + pb_resp = service.VerifyAttestationResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_verify_attestation(resp) + return resp + + @property + def create_challenge( + self, + ) -> Callable[[service.CreateChallengeRequest], service.Challenge]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateChallenge(self._session, self._host, self._interceptor) # type: ignore + + @property + def verify_attestation( + self, + ) -> Callable[ + [service.VerifyAttestationRequest], service.VerifyAttestationResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._VerifyAttestation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ConfidentialComputingRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ConfidentialComputingRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ConfidentialComputingRestTransport",) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/__init__.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/__init__.py new file mode 100644 index 000000000000..00c02a36cbf0 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .service import ( + Challenge, + CreateChallengeRequest, + GcpCredentials, + TpmAttestation, + VerifyAttestationRequest, + VerifyAttestationResponse, +) + +__all__ = ( + "Challenge", + "CreateChallengeRequest", + "GcpCredentials", + "TpmAttestation", + "VerifyAttestationRequest", + "VerifyAttestationResponse", +) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/service.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/service.py new file mode 100644 index 000000000000..1d6d4a843bb3 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/service.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.confidentialcomputing.v1", + manifest={ + "Challenge", + "CreateChallengeRequest", + "VerifyAttestationRequest", + "VerifyAttestationResponse", + "GcpCredentials", + "TpmAttestation", + }, +) + + +class Challenge(proto.Message): + r"""A Challenge from the server used to guarantee freshness of + attestations + + Attributes: + name (str): + Output only. The resource name for this Challenge in the + format ``projects/*/locations/*/challenges/*`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time at which this Challenge
+            was created
+        expire_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time at which this Challenge
+            will no longer be usable. It is also the
+            expiration time for any tokens generated from
+            this Challenge.
+        used (bool):
+            Output only. Indicates if this challenge has
+            been used to generate a token.
+        tpm_nonce (str):
+            Output only. Identical to nonce, but as a
+            string.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    expire_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    used: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    tpm_nonce: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+
+
+class CreateChallengeRequest(proto.Message):
+    r"""Message for creating a Challenge
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the location where the
+            Challenge will be used, in the format
+            ``projects/*/locations/*``.
+        challenge (google.cloud.confidentialcomputing_v1.types.Challenge):
+            Required. The Challenge to be created.
+            Currently this field can be empty as all the
+            Challenge fields are set by the server.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    challenge: "Challenge" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="Challenge",
+    )
+
+
+class VerifyAttestationRequest(proto.Message):
+    r"""A request for an OIDC token, providing all the necessary
+    information needed for this service to verify the platform state
+    of the requestor.
+
+    Attributes:
+        challenge (str):
+            Required. The name of the Challenge whose nonce was used to
+            generate the attestation, in the format
+            ``projects/*/locations/*/challenges/*``. The provided
+            Challenge will be consumed, and cannot be used again.
+        gcp_credentials (google.cloud.confidentialcomputing_v1.types.GcpCredentials):
+            Optional. Credentials used to populate the "emails" claim in
+            the claims_token.
+        tpm_attestation (google.cloud.confidentialcomputing_v1.types.TpmAttestation):
+            Required. The TPM-specific data provided by
+            the attesting platform, used to populate any of
+            the claims regarding platform state.
+    """
+
+    challenge: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    gcp_credentials: "GcpCredentials" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="GcpCredentials",
+    )
+    tpm_attestation: "TpmAttestation" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="TpmAttestation",
+    )
+
+
+class VerifyAttestationResponse(proto.Message):
+    r"""A response once an attestation has been successfully
+    verified, containing a signed OIDC token.
+
+    Attributes:
+        oidc_claims_token (str):
+            Output only. Same as claims_token, but as a string.
+    """
+
+    oidc_claims_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GcpCredentials(proto.Message):
+    r"""Credentials issued by GCP which are linked to the platform
+    attestation. These will be verified server-side as part of
+    attestation verification.
+
+    Attributes:
+        service_account_id_tokens (MutableSequence[str]):
+            Same as id_tokens, but as a string.
+    """
+
+    service_account_id_tokens: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+class TpmAttestation(proto.Message):
+    r"""TPM2 data containing everything necessary to validate any
+    platform state measured into the TPM.
+
+    Attributes:
+        quotes (MutableSequence[google.cloud.confidentialcomputing_v1.types.TpmAttestation.Quote]):
+            TPM2 PCR Quotes generated by calling TPM2_Quote on each PCR
+            bank.
+        tcg_event_log (bytes):
+            The binary TCG Event Log containing events
+            measured into the TPM by the platform firmware
+            and operating system. Formatted as described in
+            the "TCG PC Client Platform Firmware Profile
+            Specification".
+        canonical_event_log (bytes):
+            An Event Log containing additional events measured into the
+            TPM that are not already present in the tcg_event_log.
+            Formatted as described in the "Canonical Event Log Format"
+            TCG Specification.
+        ak_cert (bytes):
+            DER-encoded X.509 certificate of the
+            Attestation Key (otherwise known as an AK or a
+            TPM restricted signing key) used to generate the
+            quotes.
+        cert_chain (MutableSequence[bytes]):
+            List of DER-encoded X.509 certificates which, together with
+            the ak_cert, chain back to a trusted Root Certificate.
+    """
+
+    class Quote(proto.Message):
+        r"""Information about Platform Control Registers (PCRs) including
+        a signature over their values, which can be used for remote
+        validation.
+
+        Attributes:
+            hash_algo (int):
+                The hash algorithm of the PCR bank being quoted, encoded as
+                a TPM_ALG_ID
+            pcr_values (MutableMapping[int, bytes]):
+                Raw binary values of each PCR being quoted.
+            raw_quote (bytes):
+                TPM2 quote, encoded as a TPMS_ATTEST
+            raw_signature (bytes):
+                TPM2 signature, encoded as a TPMT_SIGNATURE
+        """
+
+        hash_algo: int = proto.Field(
+            proto.INT32,
+            number=1,
+        )
+        pcr_values: MutableMapping[int, bytes] = proto.MapField(
+            proto.INT32,
+            proto.BYTES,
+            number=2,
+        )
+        raw_quote: bytes = proto.Field(
+            proto.BYTES,
+            number=3,
+        )
+        raw_signature: bytes = proto.Field(
+            proto.BYTES,
+            number=4,
+        )
+
+    quotes: MutableSequence[Quote] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=Quote,
+    )
+    tcg_event_log: bytes = proto.Field(
+        proto.BYTES,
+        number=2,
+    )
+    canonical_event_log: bytes = proto.Field(
+        proto.BYTES,
+        number=3,
+    )
+    ak_cert: bytes = proto.Field(
+        proto.BYTES,
+        number=4,
+    )
+    cert_chain: MutableSequence[bytes] = proto.RepeatedField(
+        proto.BYTES,
+        number=5,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-confidentialcomputing/mypy.ini b/packages/google-cloud-confidentialcomputing/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-cloud-confidentialcomputing/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-cloud-confidentialcomputing/noxfile.py b/packages/google-cloud-confidentialcomputing/noxfile.py
new file mode 100644
index 000000000000..1749edb5dad7
--- /dev/null
+++ b/packages/google-cloud-confidentialcomputing/noxfile.py
@@ -0,0 +1,428 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
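Before the packaging and test configuration that follows, a hedged end-to-end sketch of the flow the service types above imply: CreateChallenge issues a single-use nonce, and VerifyAttestation consumes it. The parent path and every byte value here are hypothetical placeholders; real quotes, signatures, and event logs would come from the local TPM:

from google.cloud import confidentialcomputing_v1

client = confidentialcomputing_v1.ConfidentialComputingClient()

# Step 1: obtain a Challenge; the server populates name, tpm_nonce, etc.
challenge = client.create_challenge(
    request=confidentialcomputing_v1.CreateChallengeRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical
    )
)

# Step 2: challenge.tpm_nonce would be fed to the TPM out of band; the
# bytes below are placeholders standing in for real TPM output.
attestation = confidentialcomputing_v1.TpmAttestation(
    quotes=[
        confidentialcomputing_v1.TpmAttestation.Quote(
            hash_algo=11,  # illustrative TPM_ALG_ID value (assumed SHA-256)
            pcr_values={0: b"placeholder"},
            raw_quote=b"placeholder",
            raw_signature=b"placeholder",
        )
    ],
    tcg_event_log=b"placeholder",
)

# Step 3: the Challenge is consumed by this call and cannot be reused.
response = client.verify_attestation(
    request=confidentialcomputing_v1.VerifyAttestationRequest(
        challenge=challenge.name,
        tpm_attestation=attestation,
    )
)
print(response.oidc_claims_token)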
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. 
+ + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-cloud-confidentialcomputing/renovate.json b/packages/google-cloud-confidentialcomputing/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-confidentialcomputing/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_create_challenge_async.py b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_create_challenge_async.py
new file mode 100644
index 000000000000..c2994c0c4101
--- /dev/null
+++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_create_challenge_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateChallenge
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-confidentialcomputing
+
+
+# [START confidentialcomputing_v1_generated_ConfidentialComputing_CreateChallenge_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import confidentialcomputing_v1 + + +async def sample_create_challenge(): + # Create a client + client = confidentialcomputing_v1.ConfidentialComputingAsyncClient() + + # Initialize request argument(s) + request = confidentialcomputing_v1.CreateChallengeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_challenge(request=request) + + # Handle the response + print(response) + +# [END confidentialcomputing_v1_generated_ConfidentialComputing_CreateChallenge_async] diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_create_challenge_sync.py b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_create_challenge_sync.py new file mode 100644 index 000000000000..58fdc39df7a0 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_create_challenge_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChallenge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-confidentialcomputing + + +# [START confidentialcomputing_v1_generated_ConfidentialComputing_CreateChallenge_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import confidentialcomputing_v1 + + +def sample_create_challenge(): + # Create a client + client = confidentialcomputing_v1.ConfidentialComputingClient() + + # Initialize request argument(s) + request = confidentialcomputing_v1.CreateChallengeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_challenge(request=request) + + # Handle the response + print(response) + +# [END confidentialcomputing_v1_generated_ConfidentialComputing_CreateChallenge_sync] diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_verify_attestation_async.py b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_verify_attestation_async.py new file mode 100644 index 000000000000..66280221a55d --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_verify_attestation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for VerifyAttestation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-confidentialcomputing + + +# [START confidentialcomputing_v1_generated_ConfidentialComputing_VerifyAttestation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import confidentialcomputing_v1 + + +async def sample_verify_attestation(): + # Create a client + client = confidentialcomputing_v1.ConfidentialComputingAsyncClient() + + # Initialize request argument(s) + request = confidentialcomputing_v1.VerifyAttestationRequest( + challenge="challenge_value", + ) + + # Make the request + response = await client.verify_attestation(request=request) + + # Handle the response + print(response) + +# [END confidentialcomputing_v1_generated_ConfidentialComputing_VerifyAttestation_async] diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_verify_attestation_sync.py b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_verify_attestation_sync.py new file mode 100644 index 000000000000..4670757234aa --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/confidentialcomputing_v1_generated_confidential_computing_verify_attestation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for VerifyAttestation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-confidentialcomputing + + +# [START confidentialcomputing_v1_generated_ConfidentialComputing_VerifyAttestation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import confidentialcomputing_v1 + + +def sample_verify_attestation(): + # Create a client + client = confidentialcomputing_v1.ConfidentialComputingClient() + + # Initialize request argument(s) + request = confidentialcomputing_v1.VerifyAttestationRequest( + challenge="challenge_value", + ) + + # Make the request + response = client.verify_attestation(request=request) + + # Handle the response + print(response) + +# [END confidentialcomputing_v1_generated_ConfidentialComputing_VerifyAttestation_sync] diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json new file mode 100644 index 000000000000..957efb1f23a6 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json @@ -0,0 +1,337 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.confidentialcomputing.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-confidentialcomputing", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingAsyncClient", + "shortName": "ConfidentialComputingAsyncClient" + }, + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingAsyncClient.create_challenge", + "method": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing.CreateChallenge", + "service": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing", + "shortName": "ConfidentialComputing" + }, + "shortName": "CreateChallenge" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.confidentialcomputing_v1.types.CreateChallengeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "challenge", + "type": "google.cloud.confidentialcomputing_v1.types.Challenge" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.confidentialcomputing_v1.types.Challenge", + "shortName": "create_challenge" + }, + "description": "Sample for CreateChallenge", + "file": "confidentialcomputing_v1_generated_confidential_computing_create_challenge_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "confidentialcomputing_v1_generated_ConfidentialComputing_CreateChallenge_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "confidentialcomputing_v1_generated_confidential_computing_create_challenge_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingClient", + 
"shortName": "ConfidentialComputingClient" + }, + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingClient.create_challenge", + "method": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing.CreateChallenge", + "service": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing", + "shortName": "ConfidentialComputing" + }, + "shortName": "CreateChallenge" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.confidentialcomputing_v1.types.CreateChallengeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "challenge", + "type": "google.cloud.confidentialcomputing_v1.types.Challenge" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.confidentialcomputing_v1.types.Challenge", + "shortName": "create_challenge" + }, + "description": "Sample for CreateChallenge", + "file": "confidentialcomputing_v1_generated_confidential_computing_create_challenge_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "confidentialcomputing_v1_generated_ConfidentialComputing_CreateChallenge_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "confidentialcomputing_v1_generated_confidential_computing_create_challenge_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingAsyncClient", + "shortName": "ConfidentialComputingAsyncClient" + }, + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingAsyncClient.verify_attestation", + "method": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing.VerifyAttestation", + "service": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing", + "shortName": "ConfidentialComputing" + }, + "shortName": "VerifyAttestation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.confidentialcomputing_v1.types.VerifyAttestationRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.confidentialcomputing_v1.types.VerifyAttestationResponse", + "shortName": "verify_attestation" + }, + "description": "Sample for VerifyAttestation", + "file": "confidentialcomputing_v1_generated_confidential_computing_verify_attestation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "confidentialcomputing_v1_generated_ConfidentialComputing_VerifyAttestation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" 
+ } + ], + "title": "confidentialcomputing_v1_generated_confidential_computing_verify_attestation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingClient", + "shortName": "ConfidentialComputingClient" + }, + "fullName": "google.cloud.confidentialcomputing_v1.ConfidentialComputingClient.verify_attestation", + "method": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing.VerifyAttestation", + "service": { + "fullName": "google.cloud.confidentialcomputing.v1.ConfidentialComputing", + "shortName": "ConfidentialComputing" + }, + "shortName": "VerifyAttestation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.confidentialcomputing_v1.types.VerifyAttestationRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.confidentialcomputing_v1.types.VerifyAttestationResponse", + "shortName": "verify_attestation" + }, + "description": "Sample for VerifyAttestation", + "file": "confidentialcomputing_v1_generated_confidential_computing_verify_attestation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "confidentialcomputing_v1_generated_ConfidentialComputing_VerifyAttestation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "confidentialcomputing_v1_generated_confidential_computing_verify_attestation_sync.py" + } + ] +} diff --git a/packages/google-cloud-confidentialcomputing/scripts/decrypt-secrets.sh b/packages/google-cloud-confidentialcomputing/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
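+# For example, to read the same secrets from a different (hypothetical) project:
+#   SECRET_MANAGER_PROJECT=my-secrets-project ./scripts/decrypt-secrets.sh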
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-confidentialcomputing/scripts/fixup_confidentialcomputing_v1_keywords.py b/packages/google-cloud-confidentialcomputing/scripts/fixup_confidentialcomputing_v1_keywords.py new file mode 100644 index 000000000000..d1bab5277bf0 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/scripts/fixup_confidentialcomputing_v1_keywords.py @@ -0,0 +1,177 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class confidentialcomputingCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_challenge': ('parent', 'challenge', ), + 'verify_attestation': ('challenge', 'tpm_attestation', 'gcp_credentials', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
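+            # (A keyword argument literally named "request" only appears once a
+            # previous run of this fixer has already rewritten the call.)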
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=confidentialcomputingCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the confidentialcomputing client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
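+
+Illustrative example of the rewrite (resource names are hypothetical):
+    client.create_challenge('projects/my-proj/locations/us-central1', challenge)
+becomes
+    client.create_challenge(request={'parent': 'projects/my-proj/locations/us-central1', 'challenge': challenge})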
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-confidentialcomputing/setup.py b/packages/google-cloud-confidentialcomputing/setup.py new file mode 100644 index 000000000000..610ebca67a9a --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-confidentialcomputing" + + +description = "Google Cloud Confidentialcomputing API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/confidentialcomputing/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-confidentialcomputing/testing/.gitignore b/packages/google-cloud-confidentialcomputing/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-confidentialcomputing/testing/constraints-3.10.txt b/packages/google-cloud-confidentialcomputing/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
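+# (Entries here are deliberately unpinned; constraints-3.7.txt instead pins
+# every dependency to its lower bound.)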
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-confidentialcomputing/testing/constraints-3.11.txt b/packages/google-cloud-confidentialcomputing/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-confidentialcomputing/testing/constraints-3.12.txt b/packages/google-cloud-confidentialcomputing/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-confidentialcomputing/testing/constraints-3.7.txt b/packages/google-cloud-confidentialcomputing/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-confidentialcomputing/testing/constraints-3.8.txt b/packages/google-cloud-confidentialcomputing/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-confidentialcomputing/testing/constraints-3.9.txt b/packages/google-cloud-confidentialcomputing/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-confidentialcomputing/tests/__init__.py b/packages/google-cloud-confidentialcomputing/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/__init__.py b/packages/google-cloud-confidentialcomputing/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/__init__.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/__init__.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py new file mode 100644 index 000000000000..b96debc4e3e9 --- /dev/null +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py @@ -0,0 +1,2848 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.confidentialcomputing_v1.services.confidential_computing import ( + ConfidentialComputingAsyncClient, + ConfidentialComputingClient, + transports, +) +from google.cloud.confidentialcomputing_v1.types import service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
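+# For example, a DEFAULT_ENDPOINT containing "localhost" is remapped to
+# "foo.googleapis.com", so that the derived mTLS endpoint
+# ("foo.mtls.googleapis.com") differs from the regular endpoint.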
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConfidentialComputingClient._get_default_mtls_endpoint(None) is None + assert ( + ConfidentialComputingClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ConfidentialComputingClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ConfidentialComputingClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConfidentialComputingClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConfidentialComputingClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConfidentialComputingClient, "grpc"), + (ConfidentialComputingAsyncClient, "grpc_asyncio"), + (ConfidentialComputingClient, "rest"), + ], +) +def test_confidential_computing_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "confidentialcomputing.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://confidentialcomputing.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ConfidentialComputingGrpcTransport, "grpc"), + (transports.ConfidentialComputingGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ConfidentialComputingRestTransport, "rest"), + ], +) +def test_confidential_computing_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConfidentialComputingClient, "grpc"), + (ConfidentialComputingAsyncClient, "grpc_asyncio"), + (ConfidentialComputingClient, "rest"), + ], +) +def test_confidential_computing_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "confidentialcomputing.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://confidentialcomputing.googleapis.com" + ) + + +def test_confidential_computing_client_get_transport_class(): + transport = ConfidentialComputingClient.get_transport_class() + available_transports = [ + transports.ConfidentialComputingGrpcTransport, + transports.ConfidentialComputingRestTransport, + ] + assert transport in available_transports + + transport = ConfidentialComputingClient.get_transport_class("grpc") + assert transport == transports.ConfidentialComputingGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ConfidentialComputingClient, + transports.ConfidentialComputingGrpcTransport, + "grpc", + ), + ( + ConfidentialComputingAsyncClient, + transports.ConfidentialComputingGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ConfidentialComputingClient, + transports.ConfidentialComputingRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + ConfidentialComputingClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfidentialComputingClient), +) +@mock.patch.object( + ConfidentialComputingAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfidentialComputingAsyncClient), +) +def test_confidential_computing_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ConfidentialComputingClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ConfidentialComputingClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
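+    # With "always", the client must target DEFAULT_MTLS_ENDPOINT even though
+    # no client certificate source has been configured.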
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ConfidentialComputingClient, + transports.ConfidentialComputingGrpcTransport, + "grpc", + "true", + ), + ( + ConfidentialComputingAsyncClient, + transports.ConfidentialComputingGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ConfidentialComputingClient, + transports.ConfidentialComputingGrpcTransport, + "grpc", + "false", + ), + ( + ConfidentialComputingAsyncClient, + transports.ConfidentialComputingGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ConfidentialComputingClient, + transports.ConfidentialComputingRestTransport, + "rest", + "true", + ), + ( + ConfidentialComputingClient, + transports.ConfidentialComputingRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ConfidentialComputingClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfidentialComputingClient), +) +@mock.patch.object( + ConfidentialComputingAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfidentialComputingAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_confidential_computing_client_mtls_env_auto( + client_class, transport_class, transport_name, 
use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
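+    # With no certificate available from either source, the client should fall
+    # back to the regular endpoint and pass no client cert, regardless of the
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.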
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ConfidentialComputingClient, ConfidentialComputingAsyncClient] +) +@mock.patch.object( + ConfidentialComputingClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfidentialComputingClient), +) +@mock.patch.object( + ConfidentialComputingAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfidentialComputingAsyncClient), +) +def test_confidential_computing_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
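+    # Here "auto" should resolve to the mTLS endpoint and surface the
+    # discovered default certificate source.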
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ConfidentialComputingClient, + transports.ConfidentialComputingGrpcTransport, + "grpc", + ), + ( + ConfidentialComputingAsyncClient, + transports.ConfidentialComputingGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ConfidentialComputingClient, + transports.ConfidentialComputingRestTransport, + "rest", + ), + ], +) +def test_confidential_computing_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConfidentialComputingClient, + transports.ConfidentialComputingGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConfidentialComputingAsyncClient, + transports.ConfidentialComputingGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ConfidentialComputingClient, + transports.ConfidentialComputingRestTransport, + "rest", + None, + ), + ], +) +def test_confidential_computing_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
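+    # The file path should be forwarded to the transport as credentials_file
+    # while the credentials argument itself stays None.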
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_confidential_computing_client_client_options_from_dict(): + with mock.patch( + "google.cloud.confidentialcomputing_v1.services.confidential_computing.transports.ConfidentialComputingGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ConfidentialComputingClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConfidentialComputingClient, + transports.ConfidentialComputingGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConfidentialComputingAsyncClient, + transports.ConfidentialComputingGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_confidential_computing_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
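+    # The channel must be created with the credentials loaded from the file
+    # (file_creds), not the ADC credentials returned by google.auth.default.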
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "confidentialcomputing.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="confidentialcomputing.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateChallengeRequest, + dict, + ], +) +def test_create_challenge(request_type, transport: str = "grpc"): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_challenge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Challenge( + name="name_value", + used=True, + tpm_nonce="tpm_nonce_value", + ) + response = client.create_challenge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateChallengeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Challenge) + assert response.name == "name_value" + assert response.used is True + assert response.tpm_nonce == "tpm_nonce_value" + + +def test_create_challenge_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_challenge), "__call__") as call: + client.create_challenge() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateChallengeRequest() + + +@pytest.mark.asyncio +async def test_create_challenge_async( + transport: str = "grpc_asyncio", request_type=service.CreateChallengeRequest +): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_challenge), "__call__") as call: + # Designate an appropriate return value for the call. 
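+        # Wrapping the response in FakeUnaryUnaryCall makes the mocked method
+        # awaitable, mimicking a real async gRPC invocation.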
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.Challenge( + name="name_value", + used=True, + tpm_nonce="tpm_nonce_value", + ) + ) + response = await client.create_challenge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateChallengeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Challenge) + assert response.name == "name_value" + assert response.used is True + assert response.tpm_nonce == "tpm_nonce_value" + + +@pytest.mark.asyncio +async def test_create_challenge_async_from_dict(): + await test_create_challenge_async(request_type=dict) + + +def test_create_challenge_field_headers(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateChallengeRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_challenge), "__call__") as call: + call.return_value = service.Challenge() + client.create_challenge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_challenge_field_headers_async(): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateChallengeRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_challenge), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Challenge()) + await client.create_challenge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_challenge_flattened(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_challenge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Challenge() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_challenge( + parent="parent_value", + challenge=service.Challenge(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].challenge
+        mock_val = service.Challenge(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_challenge_flattened_error():
+    client = ConfidentialComputingClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_challenge(
+            service.CreateChallengeRequest(),
+            parent="parent_value",
+            challenge=service.Challenge(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_challenge_flattened_async():
+    client = ConfidentialComputingAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_challenge), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Challenge())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_challenge(
+            parent="parent_value",
+            challenge=service.Challenge(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].challenge
+        mock_val = service.Challenge(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_challenge_flattened_error_async():
+    client = ConfidentialComputingAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_challenge(
+            service.CreateChallengeRequest(),
+            parent="parent_value",
+            challenge=service.Challenge(name="name_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.VerifyAttestationRequest,
+        dict,
+    ],
+)
+def test_verify_attestation(request_type, transport: str = "grpc"):
+    client = ConfidentialComputingClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.verify_attestation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.VerifyAttestationResponse(
+            oidc_claims_token="oidc_claims_token_value",
+        )
+        response = client.verify_attestation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.VerifyAttestationRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, service.VerifyAttestationResponse) + assert response.oidc_claims_token == "oidc_claims_token_value" + + +def test_verify_attestation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_attestation), "__call__" + ) as call: + client.verify_attestation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.VerifyAttestationRequest() + + +@pytest.mark.asyncio +async def test_verify_attestation_async( + transport: str = "grpc_asyncio", request_type=service.VerifyAttestationRequest +): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_attestation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.VerifyAttestationResponse( + oidc_claims_token="oidc_claims_token_value", + ) + ) + response = await client.verify_attestation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.VerifyAttestationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.VerifyAttestationResponse) + assert response.oidc_claims_token == "oidc_claims_token_value" + + +@pytest.mark.asyncio +async def test_verify_attestation_async_from_dict(): + await test_verify_attestation_async(request_type=dict) + + +def test_verify_attestation_field_headers(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.VerifyAttestationRequest() + + request.challenge = "challenge_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.verify_attestation), "__call__" + ) as call: + call.return_value = service.VerifyAttestationResponse() + client.verify_attestation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "challenge=challenge_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_verify_attestation_field_headers_async(): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.VerifyAttestationRequest() + + request.challenge = "challenge_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
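+    # The routing header is a URL-encoded "key=value" string; for this RPC the
+    # key comes from the request's `challenge` field, as asserted at the end
+    # of this test via the x-goog-request-params metadata entry.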
+ with mock.patch.object( + type(client.transport.verify_attestation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.VerifyAttestationResponse() + ) + await client.verify_attestation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "challenge=challenge_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateChallengeRequest, + dict, + ], +) +def test_create_challenge_rest(request_type): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["challenge"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "expire_time": {}, + "used": True, + "tpm_nonce": "tpm_nonce_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.Challenge( + name="name_value", + used=True, + tpm_nonce="tpm_nonce_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.Challenge.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_challenge(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Challenge) + assert response.name == "name_value" + assert response.used is True + assert response.tpm_nonce == "tpm_nonce_value" + + +def test_create_challenge_rest_required_fields( + request_type=service.CreateChallengeRequest, +): + transport_class = transports.ConfidentialComputingRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_challenge._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_challenge._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = service.Challenge()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.Challenge.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_challenge(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_challenge_rest_unset_required_fields():
+    transport = transports.ConfidentialComputingRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_challenge._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "challenge",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_challenge_rest_interceptors(null_interceptor):
+    transport = transports.ConfidentialComputingRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConfidentialComputingRestInterceptor(),
+    )
+    client = ConfidentialComputingClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConfidentialComputingRestInterceptor, "post_create_challenge"
+    ) as post, mock.patch.object(
+        transports.ConfidentialComputingRestInterceptor, "pre_create_challenge"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.CreateChallengeRequest.pb(service.CreateChallengeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.Challenge.to_json(service.Challenge())
+
+        request = service.CreateChallengeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.Challenge()
+
+        client.create_challenge(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_challenge_rest_bad_request(
+    transport: str = "rest", request_type=service.CreateChallengeRequest
+):
+    client = ConfidentialComputingClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["challenge"] = {
+ "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "expire_time": {}, + "used": True, + "tpm_nonce": "tpm_nonce_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_challenge(request) + + +def test_create_challenge_rest_flattened(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.Challenge() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + challenge=service.Challenge(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.Challenge.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_challenge(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/challenges" % client.transport._host, + args[1], + ) + + +def test_create_challenge_rest_flattened_error(transport: str = "rest"): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_challenge( + service.CreateChallengeRequest(), + parent="parent_value", + challenge=service.Challenge(name="name_value"), + ) + + +def test_create_challenge_rest_error(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.VerifyAttestationRequest, + dict, + ], +) +def test_verify_attestation_rest(request_type): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "challenge": "projects/sample1/locations/sample2/challenges/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.VerifyAttestationResponse( + oidc_claims_token="oidc_claims_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.VerifyAttestationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.verify_attestation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, service.VerifyAttestationResponse) + assert response.oidc_claims_token == "oidc_claims_token_value" + + +def test_verify_attestation_rest_required_fields( + request_type=service.VerifyAttestationRequest, +): + transport_class = transports.ConfidentialComputingRestTransport + + request_init = {} + request_init["challenge"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).verify_attestation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["challenge"] = "challenge_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).verify_attestation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "challenge" in jsonified_request + assert jsonified_request["challenge"] == "challenge_value" + + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.VerifyAttestationResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.VerifyAttestationResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.verify_attestation(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_verify_attestation_rest_unset_required_fields():
+    transport = transports.ConfidentialComputingRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.verify_attestation._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "challenge",
+                "tpmAttestation",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_verify_attestation_rest_interceptors(null_interceptor):
+    transport = transports.ConfidentialComputingRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConfidentialComputingRestInterceptor(),
+    )
+    client = ConfidentialComputingClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConfidentialComputingRestInterceptor, "post_verify_attestation"
+    ) as post, mock.patch.object(
+        transports.ConfidentialComputingRestInterceptor, "pre_verify_attestation"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.VerifyAttestationRequest.pb(
+            service.VerifyAttestationRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.VerifyAttestationResponse.to_json(
+            service.VerifyAttestationResponse()
+        )
+
+        request = service.VerifyAttestationRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.VerifyAttestationResponse()
+
+        client.verify_attestation(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_verify_attestation_rest_bad_request(
+    transport: str = "rest", request_type=service.VerifyAttestationRequest
+):
+    client = ConfidentialComputingClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "challenge": "projects/sample1/locations/sample2/challenges/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
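+    # A mocked 400 status is surfaced to the caller as
+    # google.api_core.exceptions.BadRequest (imported here as
+    # core_exceptions), which pytest.raises captures below.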
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.verify_attestation(request) + + +def test_verify_attestation_rest_error(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfidentialComputingGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfidentialComputingGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfidentialComputingClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfidentialComputingGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfidentialComputingClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfidentialComputingClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfidentialComputingGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfidentialComputingClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfidentialComputingGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfidentialComputingClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfidentialComputingGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfidentialComputingGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfidentialComputingGrpcTransport, + transports.ConfidentialComputingGrpcAsyncIOTransport, + transports.ConfidentialComputingRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ConfidentialComputingClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConfidentialComputingGrpcTransport, + ) + + +def test_confidential_computing_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConfidentialComputingTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_confidential_computing_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.confidentialcomputing_v1.services.confidential_computing.transports.ConfidentialComputingTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ConfidentialComputingTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_challenge", + "verify_attestation", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_confidential_computing_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.confidentialcomputing_v1.services.confidential_computing.transports.ConfidentialComputingTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfidentialComputingTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_confidential_computing_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.confidentialcomputing_v1.services.confidential_computing.transports.ConfidentialComputingTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfidentialComputingTransport() + adc.assert_called_once() + + +def test_confidential_computing_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfidentialComputingClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfidentialComputingGrpcTransport, + transports.ConfidentialComputingGrpcAsyncIOTransport, + ], +) +def test_confidential_computing_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfidentialComputingGrpcTransport, + transports.ConfidentialComputingGrpcAsyncIOTransport, + transports.ConfidentialComputingRestTransport, + ], +) +def test_confidential_computing_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfidentialComputingGrpcTransport, grpc_helpers), + (transports.ConfidentialComputingGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_confidential_computing_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "confidentialcomputing.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="confidentialcomputing.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfidentialComputingGrpcTransport, + transports.ConfidentialComputingGrpcAsyncIOTransport, + ], +) +def test_confidential_computing_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_confidential_computing_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ConfidentialComputingRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_confidential_computing_host_no_port(transport_name): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="confidentialcomputing.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "confidentialcomputing.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://confidentialcomputing.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_confidential_computing_host_with_port(transport_name): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + 
+        client_options=client_options.ClientOptions(
+            api_endpoint="confidentialcomputing.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "confidentialcomputing.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://confidentialcomputing.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_confidential_computing_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = ConfidentialComputingClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = ConfidentialComputingClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_challenge._session
+    session2 = client2.transport.create_challenge._session
+    assert session1 != session2
+    session1 = client1.transport.verify_attestation._session
+    session2 = client2.transport.verify_attestation._session
+    assert session1 != session2
+
+
+def test_confidential_computing_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ConfidentialComputingGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_confidential_computing_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ConfidentialComputingGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
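+# (The pytest.warns(DeprecationWarning) blocks below pin the current behaviour
+# of those arguments until they are removed.)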
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfidentialComputingGrpcTransport, + transports.ConfidentialComputingGrpcAsyncIOTransport, + ], +) +def test_confidential_computing_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfidentialComputingGrpcTransport, + transports.ConfidentialComputingGrpcAsyncIOTransport, + ], +) +def test_confidential_computing_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_challenge_path(): + project = "squid" + location = "clam" + uuid = "whelk" + expected = "projects/{project}/locations/{location}/challenges/{uuid}".format( + project=project, + location=location, + uuid=uuid, + ) + actual = ConfidentialComputingClient.challenge_path(project, location, uuid) + assert expected == actual + + +def test_parse_challenge_path(): + expected = { + "project": "octopus", + "location": "oyster", + "uuid": "nudibranch", + } + path = ConfidentialComputingClient.challenge_path(**expected) + + # Check that the path construction is reversible. 
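+    # Illustration: with the values above, challenge_path produces
+    # "projects/octopus/locations/oyster/challenges/nudibranch"; the parse
+    # helper below must recover the original dict from that string.
+    assert path == "projects/octopus/locations/oyster/challenges/nudibranch"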
+ actual = ConfidentialComputingClient.parse_challenge_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ConfidentialComputingClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ConfidentialComputingClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ConfidentialComputingClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ConfidentialComputingClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ConfidentialComputingClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ConfidentialComputingClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ConfidentialComputingClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ConfidentialComputingClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConfidentialComputingClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ConfidentialComputingClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ConfidentialComputingClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ConfidentialComputingClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ConfidentialComputingClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ConfidentialComputingClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfidentialComputingClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConfidentialComputingTransport, "_prep_wrapped_messages" + ) as prep: + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConfidentialComputingTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConfidentialComputingClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
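+    # Patching __call__ on type(...) intercepts the call on the stub's
+    # multicallable class rather than on the instance; special methods are
+    # looked up on the type, which is why these tests patch it this way.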
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = ConfidentialComputingClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = ConfidentialComputingAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = ConfidentialComputingClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = ConfidentialComputingAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = ConfidentialComputingClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = ConfidentialComputingClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
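``test_transport_close`` above verifies that leaving the client's ``with`` block closes the underlying handle: the ``requests`` session for the REST transport, the channel for gRPC. A usage sketch of that guarantee, assuming the ``google-cloud-confidentialcomputing`` package this test file belongs to:

.. code-block:: python

    from google.auth import credentials as ga_credentials
    from google.cloud import confidentialcomputing_v1

    with confidentialcomputing_v1.ConfidentialComputingClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    ) as client:
        pass  # issue RPCs here
    # On exit, the transport's session has been closed.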
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ConfidentialComputingClient, transports.ConfidentialComputingGrpcTransport), + ( + ConfidentialComputingAsyncClient, + transports.ConfidentialComputingGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-contentwarehouse/.coveragerc b/packages/google-cloud-contentwarehouse/.coveragerc index e9114b2c68df..a1299b0a58fe 100644 --- a/packages/google-cloud-contentwarehouse/.coveragerc +++ b/packages/google-cloud-contentwarehouse/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/contentwarehouse/__init__.py + google/cloud/contentwarehouse/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-contentwarehouse/CHANGELOG.md b/packages/google-cloud-contentwarehouse/CHANGELOG.md index 1db42e7f47fd..a802ac0fbb53 100644 --- a/packages/google-cloud-contentwarehouse/CHANGELOG.md +++ b/packages/google-cloud-contentwarehouse/CHANGELOG.md @@ -1,5 +1,29 @@ # Changelog +## [0.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.4.1...google-cloud-contentwarehouse-v0.5.0) (2023-04-05) + + +### Features + +* add `content_category`, `text_extraction_enabled`, `retrieval_importance`, `schema_sources`, `total_result_size` fields ([80a9ff4](https://github.com/googleapis/google-cloud-python/commit/80a9ff4a68b3c40696e95d2b1faecea7c8397a5b)) +* add `DocumentCreatorDefaultRole`, `ContentCategory`, `RetrievalImportance`, `SchemaSource`, `TotalResultSize`, `LockDocumentRequest`, `CustomWeightsMetadata`, `WeightedSchemaProperty` ([80a9ff4](https://github.com/googleapis/google-cloud-python/commit/80a9ff4a68b3c40696e95d2b1faecea7c8397a5b)) +* add LockDocument service ([80a9ff4](https://github.com/googleapis/google-cloud-python/commit/80a9ff4a68b3c40696e95d2b1faecea7c8397a5b)) +* deprecate `text_extraction_disabled`, `structured_content_uri`, `async_enabled` field ([80a9ff4](https://github.com/googleapis/google-cloud-python/commit/80a9ff4a68b3c40696e95d2b1faecea7c8397a5b)) + +## [0.4.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.4.0...google-cloud-contentwarehouse-v0.4.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## 
[0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.3.1...google-cloud-contentwarehouse-v0.4.0) (2023-02-09) + + +### Features + +* enable "rest" transport in Python for services supporting numeric enums ([#10839](https://github.com/googleapis/google-cloud-python/issues/10839)) ([ad59d56](https://github.com/googleapis/google-cloud-python/commit/ad59d569bda339ed31500602e2db369afdbfcf0b)) + ## [0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.3.0...google-cloud-contentwarehouse-v0.3.1) (2023-01-20) diff --git a/packages/google-cloud-contentwarehouse/contentwarehouse-v1-py.tar.gz b/packages/google-cloud-contentwarehouse/contentwarehouse-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py index 53f517669a12..c2b8cb3d2ce0 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py @@ -55,6 +55,7 @@ from google.cloud.contentwarehouse_v1.types.common import ( AccessControlMode, DatabaseType, + DocumentCreatorDefaultRole, MergeFieldsOptions, RequestMetadata, ResponseMetadata, @@ -63,6 +64,7 @@ UserInfo, ) from google.cloud.contentwarehouse_v1.types.document import ( + ContentCategory, DateTimeArray, Document, DocumentReference, @@ -122,15 +124,18 @@ DeleteDocumentRequest, FetchAclRequest, GetDocumentRequest, + LockDocumentRequest, SearchDocumentsRequest, SetAclRequest, UpdateDocumentRequest, ) from google.cloud.contentwarehouse_v1.types.filters import ( + CustomWeightsMetadata, DocumentQuery, FileTypeFilter, PropertyFilter, TimeFilter, + WeightedSchemaProperty, ) from google.cloud.contentwarehouse_v1.types.histogram import ( HistogramQuery, @@ -193,6 +198,7 @@ "UserInfo", "AccessControlMode", "DatabaseType", + "DocumentCreatorDefaultRole", "UpdateType", "DateTimeArray", "Document", @@ -208,6 +214,7 @@ "TimestampArray", "TimestampValue", "Value", + "ContentCategory", "RawDocumentFileType", "CreateDocumentLinkRequest", "DeleteDocumentLinkRequest", @@ -243,13 +250,16 @@ "DeleteDocumentRequest", "FetchAclRequest", "GetDocumentRequest", + "LockDocumentRequest", "SearchDocumentsRequest", "SetAclRequest", "UpdateDocumentRequest", + "CustomWeightsMetadata", "DocumentQuery", "FileTypeFilter", "PropertyFilter", "TimeFilter", + "WeightedSchemaProperty", "HistogramQuery", "HistogramQueryPropertyNameFilter", "HistogramQueryResult", diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py index 056efd287f1d..5aee1cff6e11 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. 
# -__version__ = "0.3.1" # {x-release-please-version} +__version__ = "0.5.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py index bbde05ab3d16..13f36d40860b 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.contentwarehouse import gapic_version as package_version +from google.cloud.contentwarehouse_v1 import gapic_version as package_version __version__ = package_version.__version__ @@ -39,6 +39,7 @@ from .types.common import ( AccessControlMode, DatabaseType, + DocumentCreatorDefaultRole, MergeFieldsOptions, RequestMetadata, ResponseMetadata, @@ -47,6 +48,7 @@ UserInfo, ) from .types.document import ( + ContentCategory, DateTimeArray, Document, DocumentReference, @@ -106,11 +108,19 @@ DeleteDocumentRequest, FetchAclRequest, GetDocumentRequest, + LockDocumentRequest, SearchDocumentsRequest, SetAclRequest, UpdateDocumentRequest, ) -from .types.filters import DocumentQuery, FileTypeFilter, PropertyFilter, TimeFilter +from .types.filters import ( + CustomWeightsMetadata, + DocumentQuery, + FileTypeFilter, + PropertyFilter, + TimeFilter, + WeightedSchemaProperty, +) from .types.histogram import ( HistogramQuery, HistogramQueryPropertyNameFilter, @@ -165,6 +175,7 @@ "ActionOutput", "AddToFolderAction", "CloudAIDocumentOption", + "ContentCategory", "CreateDocumentLinkRequest", "CreateDocumentMetadata", "CreateDocumentRequest", @@ -172,6 +183,7 @@ "CreateDocumentSchemaRequest", "CreateRuleSetRequest", "CreateSynonymSetRequest", + "CustomWeightsMetadata", "DataUpdateAction", "DataValidationAction", "DatabaseType", @@ -184,6 +196,7 @@ "DeleteRuleSetRequest", "DeleteSynonymSetRequest", "Document", + "DocumentCreatorDefaultRole", "DocumentLink", "DocumentLinkServiceClient", "DocumentQuery", @@ -219,6 +232,7 @@ "ListRuleSetsResponse", "ListSynonymSetsRequest", "ListSynonymSetsResponse", + "LockDocumentRequest", "MapProperty", "MapTypeOptions", "MergeFieldsOptions", @@ -261,4 +275,5 @@ "UpdateType", "UserInfo", "Value", + "WeightedSchemaProperty", ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json index bc3810fa15f0..6420c7498436 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json @@ -56,6 +56,31 @@ ] } } + }, + "rest": { + "libraryClient": "DocumentLinkServiceClient", + "rpcs": { + "CreateDocumentLink": { + "methods": [ + "create_document_link" + ] + }, + "DeleteDocumentLink": { + "methods": [ + "delete_document_link" + ] + }, + "ListLinkedSources": { + "methods": [ + "list_linked_sources" + ] + }, + "ListLinkedTargets": { + "methods": [ + "list_linked_targets" + ] + } + } } } }, @@ -120,6 +145,36 @@ ] } } + }, + "rest": { + "libraryClient": "DocumentSchemaServiceClient", + "rpcs": { + "CreateDocumentSchema": { + "methods": [ + "create_document_schema" + ] + }, + "DeleteDocumentSchema": { + "methods": [ + "delete_document_schema" + ] + }, + "GetDocumentSchema": { + 
"methods": [ + "get_document_schema" + ] + }, + "ListDocumentSchemas": { + "methods": [ + "list_document_schemas" + ] + }, + "UpdateDocumentSchema": { + "methods": [ + "update_document_schema" + ] + } + } } } }, @@ -148,6 +203,11 @@ "get_document" ] }, + "LockDocument": { + "methods": [ + "lock_document" + ] + }, "SearchDocuments": { "methods": [ "search_documents" @@ -188,6 +248,56 @@ "get_document" ] }, + "LockDocument": { + "methods": [ + "lock_document" + ] + }, + "SearchDocuments": { + "methods": [ + "search_documents" + ] + }, + "SetAcl": { + "methods": [ + "set_acl" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + }, + "rest": { + "libraryClient": "DocumentServiceClient", + "rpcs": { + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "FetchAcl": { + "methods": [ + "fetch_acl" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "LockDocument": { + "methods": [ + "lock_document" + ] + }, "SearchDocuments": { "methods": [ "search_documents" @@ -268,6 +378,36 @@ ] } } + }, + "rest": { + "libraryClient": "RuleSetServiceClient", + "rpcs": { + "CreateRuleSet": { + "methods": [ + "create_rule_set" + ] + }, + "DeleteRuleSet": { + "methods": [ + "delete_rule_set" + ] + }, + "GetRuleSet": { + "methods": [ + "get_rule_set" + ] + }, + "ListRuleSets": { + "methods": [ + "list_rule_sets" + ] + }, + "UpdateRuleSet": { + "methods": [ + "update_rule_set" + ] + } + } } } }, @@ -332,6 +472,36 @@ ] } } + }, + "rest": { + "libraryClient": "SynonymSetServiceClient", + "rpcs": { + "CreateSynonymSet": { + "methods": [ + "create_synonym_set" + ] + }, + "DeleteSynonymSet": { + "methods": [ + "delete_synonym_set" + ] + }, + "GetSynonymSet": { + "methods": [ + "get_synonym_set" + ] + }, + "ListSynonymSets": { + "methods": [ + "list_synonym_sets" + ] + }, + "UpdateSynonymSet": { + "methods": [ + "update_synonym_set" + ] + } + } } } } diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py index e94c45aea923..5aee1cff6e11 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.5.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py index 865f25eb9262..e65a1fe0495e 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py @@ -658,6 +658,60 @@ async def sample_delete_document_link(): metadata=metadata, ) + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self): return self diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py index 7e9281e826c2..cedbb0625ec6 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py @@ -55,6 +55,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DocumentLinkServiceTransport from .transports.grpc import DocumentLinkServiceGrpcTransport from .transports.grpc_asyncio import DocumentLinkServiceGrpcAsyncIOTransport +from .transports.rest import DocumentLinkServiceRestTransport class DocumentLinkServiceClientMeta(type): @@ -70,6 +71,7 @@ class DocumentLinkServiceClientMeta(type): ) # type: Dict[str, Type[DocumentLinkServiceTransport]] _transport_registry["grpc"] = DocumentLinkServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentLinkServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DocumentLinkServiceRestTransport def get_transport_class( cls, @@ -917,6 +919,60 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
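Because ``GetOperationRequest`` is not a proto-plus wrapped type, this method accepts either the raw proto or a plain ``dict`` that it expands into one, as the comments in the body note. A hypothetical call against the new mixin (the operation name is a placeholder, and real credentials are required):

.. code-block:: python

    from google.cloud import contentwarehouse_v1

    client = contentwarehouse_v1.DocumentLinkServiceClient()  # uses default credentials
    # A dict request is coerced via keyword expansion into GetOperationRequest.
    operation = client.get_operation(
        request={"name": "projects/my-project/locations/us/operations/my-op"}
    )
    print(operation.done)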
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/__init__.py index 343e159c82d4..bd06f2b7a458 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/__init__.py @@ -19,6 +19,7 @@ from .base import DocumentLinkServiceTransport from .grpc import DocumentLinkServiceGrpcTransport from .grpc_asyncio import DocumentLinkServiceGrpcAsyncIOTransport +from .rest import DocumentLinkServiceRestInterceptor, DocumentLinkServiceRestTransport # Compile a registry of transports. _transport_registry = ( @@ -26,9 +27,12 @@ ) # type: Dict[str, Type[DocumentLinkServiceTransport]] _transport_registry["grpc"] = DocumentLinkServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentLinkServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DocumentLinkServiceRestTransport __all__ = ( "DocumentLinkServiceTransport", "DocumentLinkServiceGrpcTransport", "DocumentLinkServiceGrpcAsyncIOTransport", + "DocumentLinkServiceRestTransport", + "DocumentLinkServiceRestInterceptor", ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py index 302838680d5d..0fd01099fd35 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py @@ -200,6 +200,15 @@ def delete_document_link( ]: raise NotImplementedError() + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py index 636e50535fbd..3eae4230303d 100644 --- 
a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py @@ -350,6 +350,23 @@ def delete_document_link( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py index 51483884e578..fa334b5981e8 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py @@ -355,5 +355,22 @@ def delete_document_link( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ("DocumentLinkServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py new file mode 100644 index 000000000000..19da82ba0017 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py @@ -0,0 +1,816 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import document_link_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DocumentLinkServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DocumentLinkServiceRestInterceptor: + """Interceptor for DocumentLinkService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DocumentLinkServiceRestTransport. + + .. code-block:: python + class MyCustomDocumentLinkServiceInterceptor(DocumentLinkServiceRestInterceptor): + def pre_create_document_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_document_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_document_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_list_linked_sources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_linked_sources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_linked_targets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_linked_targets(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DocumentLinkServiceRestTransport(interceptor=MyCustomDocumentLinkServiceInterceptor()) + client = DocumentLinkServiceClient(transport=transport) + + + """ + + def pre_create_document_link( + self, + request: document_link_service.CreateDocumentLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_link_service.CreateDocumentLinkRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_document_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentLinkService server. 
+ """ + return request, metadata + + def post_create_document_link( + self, response: document_link_service.DocumentLink + ) -> document_link_service.DocumentLink: + """Post-rpc interceptor for create_document_link + + Override in a subclass to manipulate the response + after it is returned by the DocumentLinkService server but before + it is returned to user code. + """ + return response + + def pre_delete_document_link( + self, + request: document_link_service.DeleteDocumentLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_link_service.DeleteDocumentLinkRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_document_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentLinkService server. + """ + return request, metadata + + def pre_list_linked_sources( + self, + request: document_link_service.ListLinkedSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_link_service.ListLinkedSourcesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_linked_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentLinkService server. + """ + return request, metadata + + def post_list_linked_sources( + self, response: document_link_service.ListLinkedSourcesResponse + ) -> document_link_service.ListLinkedSourcesResponse: + """Post-rpc interceptor for list_linked_sources + + Override in a subclass to manipulate the response + after it is returned by the DocumentLinkService server but before + it is returned to user code. + """ + return response + + def pre_list_linked_targets( + self, + request: document_link_service.ListLinkedTargetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_link_service.ListLinkedTargetsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_linked_targets + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentLinkService server. + """ + return request, metadata + + def post_list_linked_targets( + self, response: document_link_service.ListLinkedTargetsResponse + ) -> document_link_service.ListLinkedTargetsResponse: + """Post-rpc interceptor for list_linked_targets + + Override in a subclass to manipulate the response + after it is returned by the DocumentLinkService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentLinkService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentLinkService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DocumentLinkServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DocumentLinkServiceRestInterceptor + + +class DocumentLinkServiceRestTransport(DocumentLinkServiceTransport): + """REST backend transport for DocumentLinkService. + + This service lets you manage document-links. 
+    Document-Links are treated as sub-resources under source
+    documents.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "contentwarehouse.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[DocumentLinkServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DocumentLinkServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateDocumentLink(DocumentLinkServiceRestStub):
+        def __hash__(self):
+            return hash("CreateDocumentLink")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: document_link_service.CreateDocumentLinkRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> document_link_service.DocumentLink:
+            r"""Call the create document link method over HTTP.
+
+            Args:
+                request (~.document_link_service.CreateDocumentLinkRequest):
+                    The request object. Request message for
+                    DocumentLinkService.CreateDocumentLink.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_link_service.DocumentLink:
+                    A document-link between source and
+                    target document.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*/documents/*}/documentLinks",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_document_link(
+                request, metadata
+            )
+            pb_request = document_link_service.CreateDocumentLinkRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_link_service.DocumentLink() + pb_resp = document_link_service.DocumentLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_document_link(resp) + return resp + + class _DeleteDocumentLink(DocumentLinkServiceRestStub): + def __hash__(self): + return hash("DeleteDocumentLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_link_service.DeleteDocumentLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete document link method over HTTP. + + Args: + request (~.document_link_service.DeleteDocumentLinkRequest): + The request object. Request message for + DocumentLinkService.DeleteDocumentLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/documents/*/documentLinks/*}:delete", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_delete_document_link( + request, metadata + ) + pb_request = document_link_service.DeleteDocumentLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
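Each REST stub above runs its request through ``path_template.transcode``, which matches the request fields against the ``http_options`` URI templates and splits the result into ``method``, ``uri``, ``body``, and ``query_params``. A small sketch of that translation step with a hand-written template (illustrative values only):

.. code-block:: python

    from google.api_core import path_template

    http_options = [
        {"method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}"},
    ]
    # Fields consumed by the URI template are expanded into the path; any
    # leftover fields become query parameters.
    transcoded = path_template.transcode(
        http_options, name="projects/p/locations/l/operations/o"
    )
    assert transcoded["method"] == "get"
    assert transcoded["uri"] == "/v1/projects/p/locations/l/operations/o"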
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+    class _ListLinkedSources(DocumentLinkServiceRestStub):
+        def __hash__(self):
+            return hash("ListLinkedSources")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: document_link_service.ListLinkedSourcesRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> document_link_service.ListLinkedSourcesResponse:
+            r"""Call the list linked sources method over HTTP.
+
+            Args:
+                request (~.document_link_service.ListLinkedSourcesRequest):
+                    The request object. Request message for
+                    DocumentLinkService.ListLinkedSources.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_link_service.ListLinkedSourcesResponse:
+                    Response message for
+                    DocumentLinkService.ListLinkedSources.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*/documents/*}/linkedSources",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_list_linked_sources(
+                request, metadata
+            )
+            pb_request = document_link_service.ListLinkedSourcesRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
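Every ``__call__`` above funnels non-2xx responses through ``core_exceptions.from_http_response``, which maps the HTTP status onto the matching ``GoogleAPICallError`` subclass; that mapping is what lets the REST test at the top of this section expect ``BadRequest`` for a 400. A compact sketch, mirroring how the tests fake a response:

.. code-block:: python

    import requests
    from google.api_core import exceptions as core_exceptions

    response = requests.Response()
    response.status_code = 400
    response.request = requests.Request()  # from_http_response reads the request
    exc = core_exceptions.from_http_response(response)
    assert isinstance(exc, core_exceptions.BadRequest)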
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_link_service.ListLinkedSourcesResponse() + pb_resp = document_link_service.ListLinkedSourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_linked_sources(resp) + return resp + + class _ListLinkedTargets(DocumentLinkServiceRestStub): + def __hash__(self): + return hash("ListLinkedTargets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_link_service.ListLinkedTargetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_link_service.ListLinkedTargetsResponse: + r"""Call the list linked targets method over HTTP. + + Args: + request (~.document_link_service.ListLinkedTargetsRequest): + The request object. Request message for + DocumentLinkService.ListLinkedTargets. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_link_service.ListLinkedTargetsResponse: + Response message for + DocumentLinkService.ListLinkedTargets. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/documents/*}/linkedTargets", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_list_linked_targets( + request, metadata + ) + pb_request = document_link_service.ListLinkedTargetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_link_service.ListLinkedTargetsResponse() + pb_resp = document_link_service.ListLinkedTargetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_linked_targets(resp) + return resp + + @property + def create_document_link( + self, + ) -> Callable[ + [document_link_service.CreateDocumentLinkRequest], + document_link_service.DocumentLink, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDocumentLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_document_link( + self, + ) -> Callable[[document_link_service.DeleteDocumentLinkRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDocumentLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_linked_sources( + self, + ) -> Callable[ + [document_link_service.ListLinkedSourcesRequest], + document_link_service.ListLinkedSourcesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLinkedSources(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_linked_targets( + self, + ) -> Callable[ + [document_link_service.ListLinkedTargetsRequest], + document_link_service.ListLinkedTargetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLinkedTargets(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DocumentLinkServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DocumentLinkServiceRestTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py index 9dd98660ebe8..bf20362dd515 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py @@ -806,6 +806,60 @@ async def sample_list_document_schemas(): # Done; return the response. return response + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self): return self diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py index e539fa0acd58..4b3c6d9aade2 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py @@ -59,6 +59,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DocumentSchemaServiceTransport from .transports.grpc import DocumentSchemaServiceGrpcTransport from .transports.grpc_asyncio import DocumentSchemaServiceGrpcAsyncIOTransport +from .transports.rest import DocumentSchemaServiceRestTransport class DocumentSchemaServiceClientMeta(type): @@ -74,6 +75,7 @@ class DocumentSchemaServiceClientMeta(type): ) # type: Dict[str, Type[DocumentSchemaServiceTransport]] _transport_registry["grpc"] = DocumentSchemaServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentSchemaServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DocumentSchemaServiceRestTransport def get_transport_class( cls, @@ -1036,6 +1038,60 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/__init__.py index 1a7bd15f33e1..25bf41fb6407 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/__init__.py @@ -19,6 +19,10 @@ from .base import DocumentSchemaServiceTransport from .grpc import DocumentSchemaServiceGrpcTransport from .grpc_asyncio import DocumentSchemaServiceGrpcAsyncIOTransport +from .rest import ( + DocumentSchemaServiceRestInterceptor, + DocumentSchemaServiceRestTransport, +) # Compile a registry of transports. _transport_registry = ( @@ -26,9 +30,12 @@ ) # type: Dict[str, Type[DocumentSchemaServiceTransport]] _transport_registry["grpc"] = DocumentSchemaServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentSchemaServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DocumentSchemaServiceRestTransport __all__ = ( "DocumentSchemaServiceTransport", "DocumentSchemaServiceGrpcTransport", "DocumentSchemaServiceGrpcAsyncIOTransport", + "DocumentSchemaServiceRestTransport", + "DocumentSchemaServiceRestInterceptor", ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py index 3ee723f4d045..5904e50519ca 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py @@ -238,6 +238,15 @@ def list_document_schemas( ]: raise NotImplementedError() + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py index 6744feec9da9..d22be9836929 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py @@ -390,6 +390,23 @@ def list_document_schemas( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
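+        # The stub is created on first access and cached in self._stubs, so
+        # later lookups of this property reuse the same channel method.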
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py index 45d02db759bc..cd097f59cc2c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py @@ -394,5 +394,22 @@ def list_document_schemas( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ("DocumentSchemaServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py new file mode 100644 index 000000000000..1ce9bc08a645 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py @@ -0,0 +1,937 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import ( + document_schema as gcc_document_schema, +) +from google.cloud.contentwarehouse_v1.types import document_schema +from google.cloud.contentwarehouse_v1.types import document_schema_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DocumentSchemaServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DocumentSchemaServiceRestInterceptor: + """Interceptor for DocumentSchemaService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DocumentSchemaServiceRestTransport. + + .. 
code-block:: python + class MyCustomDocumentSchemaServiceInterceptor(DocumentSchemaServiceRestInterceptor): + def pre_create_document_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_document_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_document_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_document_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_document_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_document_schemas(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_document_schemas(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_document_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_document_schema(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DocumentSchemaServiceRestTransport(interceptor=MyCustomDocumentSchemaServiceInterceptor()) + client = DocumentSchemaServiceClient(transport=transport) + + + """ + + def pre_create_document_schema( + self, + request: document_schema_service.CreateDocumentSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_schema_service.CreateDocumentSchemaRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_document_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentSchemaService server. + """ + return request, metadata + + def post_create_document_schema( + self, response: gcc_document_schema.DocumentSchema + ) -> gcc_document_schema.DocumentSchema: + """Post-rpc interceptor for create_document_schema + + Override in a subclass to manipulate the response + after it is returned by the DocumentSchemaService server but before + it is returned to user code. + """ + return response + + def pre_delete_document_schema( + self, + request: document_schema_service.DeleteDocumentSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_schema_service.DeleteDocumentSchemaRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_document_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentSchemaService server. + """ + return request, metadata + + def pre_get_document_schema( + self, + request: document_schema_service.GetDocumentSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_schema_service.GetDocumentSchemaRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_document_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentSchemaService server. + """ + return request, metadata + + def post_get_document_schema( + self, response: document_schema.DocumentSchema + ) -> document_schema.DocumentSchema: + """Post-rpc interceptor for get_document_schema + + Override in a subclass to manipulate the response + after it is returned by the DocumentSchemaService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_document_schemas( + self, + request: document_schema_service.ListDocumentSchemasRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_schema_service.ListDocumentSchemasRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_document_schemas + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentSchemaService server. + """ + return request, metadata + + def post_list_document_schemas( + self, response: document_schema_service.ListDocumentSchemasResponse + ) -> document_schema_service.ListDocumentSchemasResponse: + """Post-rpc interceptor for list_document_schemas + + Override in a subclass to manipulate the response + after it is returned by the DocumentSchemaService server but before + it is returned to user code. + """ + return response + + def pre_update_document_schema( + self, + request: document_schema_service.UpdateDocumentSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_schema_service.UpdateDocumentSchemaRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_document_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentSchemaService server. + """ + return request, metadata + + def post_update_document_schema( + self, response: gcc_document_schema.DocumentSchema + ) -> gcc_document_schema.DocumentSchema: + """Post-rpc interceptor for update_document_schema + + Override in a subclass to manipulate the response + after it is returned by the DocumentSchemaService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentSchemaService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentSchemaService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DocumentSchemaServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DocumentSchemaServiceRestInterceptor + + +class DocumentSchemaServiceRestTransport(DocumentSchemaServiceTransport): + """REST backend transport for DocumentSchemaService. + + This service lets you manage document schema. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DocumentSchemaServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DocumentSchemaServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateDocumentSchema(DocumentSchemaServiceRestStub):
+        def __hash__(self):
+            return hash("CreateDocumentSchema")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: document_schema_service.CreateDocumentSchemaRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcc_document_schema.DocumentSchema:
+            r"""Call the create document schema method over HTTP.
+
+            Args:
+                request (~.document_schema_service.CreateDocumentSchemaRequest):
+                    The request object. Request message for
+                    DocumentSchemaService.CreateDocumentSchema.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcc_document_schema.DocumentSchema:
+                    A document schema used to define
+                document structure.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*}/documentSchemas",
+                    "body": "document_schema",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_document_schema(
+                request, metadata
+            )
+            pb_request = document_schema_service.CreateDocumentSchemaRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
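+            # from_http_response selects the exception class from the HTTP status
+            # code and includes the server's error payload in the message.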
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcc_document_schema.DocumentSchema() + pb_resp = gcc_document_schema.DocumentSchema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_document_schema(resp) + return resp + + class _DeleteDocumentSchema(DocumentSchemaServiceRestStub): + def __hash__(self): + return hash("DeleteDocumentSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_schema_service.DeleteDocumentSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete document schema method over HTTP. + + Args: + request (~.document_schema_service.DeleteDocumentSchemaRequest): + The request object. Request message for + DocumentSchemaService.DeleteDocumentSchema. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/documentSchemas/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_document_schema( + request, metadata + ) + pb_request = document_schema_service.DeleteDocumentSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDocumentSchema(DocumentSchemaServiceRestStub): + def __hash__(self): + return hash("GetDocumentSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_schema_service.GetDocumentSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_schema.DocumentSchema: + r"""Call the get document schema method over HTTP. + + Args: + request (~.document_schema_service.GetDocumentSchemaRequest): + The request object. Request message for + DocumentSchemaService.GetDocumentSchema. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_schema.DocumentSchema: + A document schema used to define + document structure. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/documentSchemas/*}", + }, + ] + request, metadata = self._interceptor.pre_get_document_schema( + request, metadata + ) + pb_request = document_schema_service.GetDocumentSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_schema.DocumentSchema() + pb_resp = document_schema.DocumentSchema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_document_schema(resp) + return resp + + class _ListDocumentSchemas(DocumentSchemaServiceRestStub): + def __hash__(self): + return hash("ListDocumentSchemas") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_schema_service.ListDocumentSchemasRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_schema_service.ListDocumentSchemasResponse: + r"""Call the list document schemas method over HTTP. + + Args: + request (~.document_schema_service.ListDocumentSchemasRequest): + The request object. Request message for + DocumentSchemaService.ListDocumentSchemas. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_schema_service.ListDocumentSchemasResponse: + Response message for + DocumentSchemaService.ListDocumentSchemas. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/documentSchemas", + }, + ] + request, metadata = self._interceptor.pre_list_document_schemas( + request, metadata + ) + pb_request = document_schema_service.ListDocumentSchemasRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_schema_service.ListDocumentSchemasResponse() + pb_resp = document_schema_service.ListDocumentSchemasResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_document_schemas(resp) + return resp + + class _UpdateDocumentSchema(DocumentSchemaServiceRestStub): + def __hash__(self): + return hash("UpdateDocumentSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_schema_service.UpdateDocumentSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcc_document_schema.DocumentSchema: + r"""Call the update document schema method over HTTP. + + Args: + request (~.document_schema_service.UpdateDocumentSchemaRequest): + The request object. Request message for + DocumentSchemaService.UpdateDocumentSchema. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcc_document_schema.DocumentSchema: + A document schema used to define + document structure. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{name=projects/*/locations/*/documentSchemas/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_document_schema( + request, metadata + ) + pb_request = document_schema_service.UpdateDocumentSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcc_document_schema.DocumentSchema() + pb_resp = gcc_document_schema.DocumentSchema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_document_schema(resp) + return resp + + @property + def create_document_schema( + self, + ) -> Callable[ + [document_schema_service.CreateDocumentSchemaRequest], + gcc_document_schema.DocumentSchema, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDocumentSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_document_schema( + self, + ) -> Callable[ + [document_schema_service.DeleteDocumentSchemaRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDocumentSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_document_schema( + self, + ) -> Callable[ + [document_schema_service.GetDocumentSchemaRequest], + document_schema.DocumentSchema, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDocumentSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_document_schemas( + self, + ) -> Callable[ + [document_schema_service.ListDocumentSchemasRequest], + document_schema_service.ListDocumentSchemasResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDocumentSchemas(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_document_schema( + self, + ) -> Callable[ + [document_schema_service.UpdateDocumentSchemaRequest], + gcc_document_schema.DocumentSchema, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDocumentSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DocumentSchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DocumentSchemaServiceRestTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py index 2431d603beef..7ad5fcccd491 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/async_client.py @@ -42,8 +42,9 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.documentai_v1.types import document as gcd_document from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.document_service import pagers @@ -801,6 +802,111 @@ async def sample_search_documents(): # Done; return the response. return response + async def lock_document( + self, + request: Optional[ + Union[document_service_request.LockDocumentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcc_document.Document: + r"""Lock the document so the document cannot be updated + by other users. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contentwarehouse_v1 + + async def sample_lock_document(): + # Create a client + client = contentwarehouse_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.LockDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.lock_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contentwarehouse_v1.types.LockDocumentRequest, dict]]): + The request object. Request message for + DocumentService.LockDocument. + name (:class:`str`): + Required. The name of the document to lock. Format: + projects/{project_number}/locations/{location}/documents/{document}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.contentwarehouse_v1.types.Document: + Defines the structure for content + warehouse document proto. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = document_service_request.LockDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lock_document, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def fetch_acl( self, request: Optional[Union[document_service_request.FetchAclRequest, dict]] = None, @@ -848,6 +954,8 @@ async def sample_fetch_acl(): Required. REQUIRED: The resource for which the policy is being requested. Format for document: projects/{project_number}/locations/{location}/documents/{document_id}. + Format for collection: + projects/{project_number}/locations/{location}/collections/{collection_id}. Format for project: projects/{project_number}. This corresponds to the ``resource`` field @@ -963,6 +1071,8 @@ async def sample_set_acl(): Required. REQUIRED: The resource for which the policy is being requested. Format for document: projects/{project_number}/locations/{location}/documents/{document_id}. + Format for collection: + projects/{project_number}/locations/{location}/collections/{collection_id}. Format for project: projects/{project_number}. This corresponds to the ``resource`` field @@ -971,7 +1081,26 @@ async def sample_set_acl(): policy (:class:`google.iam.v1.policy_pb2.Policy`): Required. REQUIRED: The complete policy to be applied to the ``resource``. The size of the policy is limited to a - few 10s of KB. + few 10s of KB. This refers to an Identity and Access + (IAM) policy, which specifies access controls for the + Document. + + You can set ACL with condition for projects only. + + Supported operators are: ``=``, ``!=``, ``<``, ``<=``, + ``>``, and ``>=`` where the left of the operator is + ``DocumentSchemaId`` or property name and the right of + the operator is a number or a quoted string. You must + escape backslash (\) and quote (") characters. + + Boolean expressions (AND/OR) are supported up to 3 + levels of nesting (for example, "((A AND B AND C) OR D) + AND E"), a maximum of 10 comparisons are allowed in the + expression. The expression must be < 6000 bytes in + length. + + Sample condition: + ``"DocumentSchemaId = \"some schema id\" OR SchemaId.floatPropertyName >= 10"`` This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this @@ -1032,6 +1161,60 @@ async def sample_set_acl(): # Done; return the response. 
return response + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self): return self diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py index f85fbb2e154b..360d95d73185 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py @@ -46,8 +46,9 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.documentai_v1.types import document as gcd_document from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.document_service import pagers @@ -62,6 +63,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DocumentServiceTransport from .transports.grpc import DocumentServiceGrpcTransport from .transports.grpc_asyncio import DocumentServiceGrpcAsyncIOTransport +from .transports.rest import DocumentServiceRestTransport class DocumentServiceClientMeta(type): @@ -77,6 +79,7 @@ class DocumentServiceClientMeta(type): ) # type: Dict[str, Type[DocumentServiceTransport]] _transport_registry["grpc"] = DocumentServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DocumentServiceRestTransport def get_transport_class( cls, @@ -1052,6 +1055,111 @@ def sample_search_documents(): # Done; return the response. 
return response + def lock_document( + self, + request: Optional[ + Union[document_service_request.LockDocumentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcc_document.Document: + r"""Lock the document so the document cannot be updated + by other users. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contentwarehouse_v1 + + def sample_lock_document(): + # Create a client + client = contentwarehouse_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.LockDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.lock_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contentwarehouse_v1.types.LockDocumentRequest, dict]): + The request object. Request message for + DocumentService.LockDocument. + name (str): + Required. The name of the document to lock. Format: + projects/{project_number}/locations/{location}/documents/{document}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.contentwarehouse_v1.types.Document: + Defines the structure for content + warehouse document proto. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a document_service_request.LockDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, document_service_request.LockDocumentRequest): + request = document_service_request.LockDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lock_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def fetch_acl( self, request: Optional[Union[document_service_request.FetchAclRequest, dict]] = None, @@ -1099,6 +1207,8 @@ def sample_fetch_acl(): Required. REQUIRED: The resource for which the policy is being requested. Format for document: projects/{project_number}/locations/{location}/documents/{document_id}. + Format for collection: + projects/{project_number}/locations/{location}/collections/{collection_id}. Format for project: projects/{project_number}. This corresponds to the ``resource`` field @@ -1205,6 +1315,8 @@ def sample_set_acl(): Required. REQUIRED: The resource for which the policy is being requested. Format for document: projects/{project_number}/locations/{location}/documents/{document_id}. + Format for collection: + projects/{project_number}/locations/{location}/collections/{collection_id}. Format for project: projects/{project_number}. This corresponds to the ``resource`` field @@ -1213,7 +1325,26 @@ def sample_set_acl(): policy (google.iam.v1.policy_pb2.Policy): Required. REQUIRED: The complete policy to be applied to the ``resource``. The size of the policy is limited to a - few 10s of KB. + few 10s of KB. This refers to an Identity and Access + (IAM) policy, which specifies access controls for the + Document. + + You can set ACL with condition for projects only. + + Supported operators are: ``=``, ``!=``, ``<``, ``<=``, + ``>``, and ``>=`` where the left of the operator is + ``DocumentSchemaId`` or property name and the right of + the operator is a number or a quoted string. You must + escape backslash (\) and quote (") characters. + + Boolean expressions (AND/OR) are supported up to 3 + levels of nesting (for example, "((A AND B AND C) OR D) + AND E"), a maximum of 10 comparisons are allowed in the + expression. The expression must be < 6000 bytes in + length. + + Sample condition: + ``"DocumentSchemaId = \"some schema id\" OR SchemaId.floatPropertyName >= 10"`` This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this @@ -1287,6 +1418,60 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
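+        # to_grpc_metadata encodes the resource name as an "x-goog-request-params"
+        # header, which the backend uses to route the request.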
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/__init__.py index dcf5db70137b..98aadd5d9ab4 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import DocumentServiceTransport from .grpc import DocumentServiceGrpcTransport from .grpc_asyncio import DocumentServiceGrpcAsyncIOTransport +from .rest import DocumentServiceRestInterceptor, DocumentServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[DocumentServiceTransport]] _transport_registry["grpc"] = DocumentServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DocumentServiceRestTransport __all__ = ( "DocumentServiceTransport", "DocumentServiceGrpcTransport", "DocumentServiceGrpcAsyncIOTransport", + "DocumentServiceRestTransport", + "DocumentServiceRestInterceptor", ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py index d1e82cbb1c5a..bee1612dd8d2 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py @@ -162,6 +162,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=180.0, client_info=client_info, ), + self.lock_document: gapic_v1.method.wrap_method( + self.lock_document, + default_timeout=None, + client_info=client_info, + ), self.fetch_acl: gapic_v1.method.wrap_method( self.fetch_acl, default_retry=retries.Retry( @@ -246,6 +251,15 @@ def search_documents( ]: raise NotImplementedError() + @property + def lock_document( + self, + ) -> Callable[ + [document_service_request.LockDocumentRequest], + Union[gcc_document.Document, Awaitable[gcc_document.Document]], + ]: + raise NotImplementedError() + @property def fetch_acl( self, @@ -269,6 +283,15 @@ def set_acl( ]: raise NotImplementedError() + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py index 775fb5a48291..eaeb4a2d90a2 100644 --- 
a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py @@ -379,6 +379,35 @@ def search_documents( ) return self._stubs["search_documents"] + @property + def lock_document( + self, + ) -> Callable[ + [document_service_request.LockDocumentRequest], gcc_document.Document + ]: + r"""Return a callable for the lock document method over gRPC. + + Lock the document so the document cannot be updated + by other users. + + Returns: + Callable[[~.LockDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lock_document" not in self._stubs: + self._stubs["lock_document"] = self.grpc_channel.unary_unary( + "/google.cloud.contentwarehouse.v1.DocumentService/LockDocument", + request_serializer=document_service_request.LockDocumentRequest.serialize, + response_deserializer=gcc_document.Document.deserialize, + ) + return self._stubs["lock_document"] + @property def fetch_acl( self, @@ -441,6 +470,23 @@ def set_acl( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py index d5ce9f68b4fa..8eafd3cd5ab2 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py @@ -386,6 +386,35 @@ def search_documents( ) return self._stubs["search_documents"] + @property + def lock_document( + self, + ) -> Callable[ + [document_service_request.LockDocumentRequest], Awaitable[gcc_document.Document] + ]: + r"""Return a callable for the lock document method over gRPC. + + Lock the document so the document cannot be updated + by other users. + + Returns: + Callable[[~.LockDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
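+        # A hedged usage sketch (the document name is hypothetical; callers
+        # would normally go through the async client's ``lock_document``
+        # method rather than the transport callable directly):
+        #
+        #     document = await transport.lock_document(
+        #         document_service_request.LockDocumentRequest(
+        #             name="projects/p/locations/us/documents/d"
+        #         )
+        #     )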
+ if "lock_document" not in self._stubs: + self._stubs["lock_document"] = self.grpc_channel.unary_unary( + "/google.cloud.contentwarehouse.v1.DocumentService/LockDocument", + request_serializer=document_service_request.LockDocumentRequest.serialize, + response_deserializer=gcc_document.Document.deserialize, + ) + return self._stubs["lock_document"] + @property def fetch_acl( self, @@ -450,5 +479,22 @@ def set_acl( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ("DocumentServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py new file mode 100644 index 000000000000..d49ab227b356 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py @@ -0,0 +1,1391 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import ( + document_service, + document_service_request, +) +from google.cloud.contentwarehouse_v1.types import document as gcc_document + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DocumentServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DocumentServiceRestInterceptor: + """Interceptor for DocumentService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DocumentServiceRestTransport. + + .. 
code-block:: python + class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_create_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_fetch_acl(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_acl(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lock_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lock_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_search_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_acl(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_acl(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_document(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DocumentServiceRestTransport(interceptor=MyCustomDocumentServiceInterceptor()) + client = DocumentServiceClient(transport=transport) + + + """ + + def pre_create_document( + self, + request: document_service_request.CreateDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service_request.CreateDocumentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_create_document( + self, response: document_service.CreateDocumentResponse + ) -> document_service.CreateDocumentResponse: + """Post-rpc interceptor for create_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_delete_document( + self, + request: document_service_request.DeleteDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service_request.DeleteDocumentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. 
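+
+        For example, a subclass could restrict deletes to a single project
+        (a minimal sketch; the project check is hypothetical):
+
+        .. code-block:: python
+            def pre_delete_document(self, request, metadata):
+                if "projects/sandbox-project/" not in request.name:
+                    raise PermissionError("deletes are restricted to the sandbox")
+                return request, metadata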
+ """ + return request, metadata + + def pre_fetch_acl( + self, + request: document_service_request.FetchAclRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_service_request.FetchAclRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_acl + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_fetch_acl( + self, response: document_service.FetchAclResponse + ) -> document_service.FetchAclResponse: + """Post-rpc interceptor for fetch_acl + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_get_document( + self, + request: document_service_request.GetDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_service_request.GetDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_get_document( + self, response: gcc_document.Document + ) -> gcc_document.Document: + """Post-rpc interceptor for get_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_lock_document( + self, + request: document_service_request.LockDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_service_request.LockDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for lock_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_lock_document( + self, response: gcc_document.Document + ) -> gcc_document.Document: + """Post-rpc interceptor for lock_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_search_documents( + self, + request: document_service_request.SearchDocumentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service_request.SearchDocumentsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for search_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_search_documents( + self, response: document_service.SearchDocumentsResponse + ) -> document_service.SearchDocumentsResponse: + """Post-rpc interceptor for search_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_set_acl( + self, + request: document_service_request.SetAclRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_service_request.SetAclRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_acl + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. 
+ """ + return request, metadata + + def post_set_acl( + self, response: document_service.SetAclResponse + ) -> document_service.SetAclResponse: + """Post-rpc interceptor for set_acl + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_update_document( + self, + request: document_service_request.UpdateDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service_request.UpdateDocumentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_update_document( + self, response: document_service.UpdateDocumentResponse + ) -> document_service.UpdateDocumentResponse: + """Post-rpc interceptor for update_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DocumentServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DocumentServiceRestInterceptor + + +class DocumentServiceRestTransport(DocumentServiceTransport): + """REST backend transport for DocumentService. + + This service lets you manage document. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DocumentServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DocumentServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("CreateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.CreateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.CreateDocumentResponse: + r"""Call the create document method over HTTP. + + Args: + request (~.document_service_request.CreateDocumentRequest): + The request object. Request message for + DocumentService.CreateDocument. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.CreateDocumentResponse: + Response message for + DocumentService.CreateDocument.
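+
+                    For illustration, a request whose ``parent`` is
+                    ``projects/my-proj/locations/us`` is transcoded to
+                    ``POST /v1/projects/my-proj/locations/us/documents``, with
+                    the remaining request fields sent as the JSON body.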
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/documents", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_document(request, metadata) + pb_request = document_service_request.CreateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.CreateDocumentResponse() + pb_resp = document_service.CreateDocumentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_document(resp) + return resp + + class _DeleteDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("DeleteDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.DeleteDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete document method over HTTP. + + Args: + request (~.document_service_request.DeleteDocumentRequest): + The request object. Request message for + DocumentService.DeleteDocument. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/documents/*}:delete", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/documents/referenceId/*}:delete", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_delete_document(request, metadata) + pb_request = document_service_request.DeleteDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _FetchAcl(DocumentServiceRestStub): + def __hash__(self): + return hash("FetchAcl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.FetchAclRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.FetchAclResponse: + r"""Call the fetch acl method over HTTP. + + Args: + request (~.document_service_request.FetchAclRequest): + The request object. Request message for + DocumentService.FetchAcl + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.FetchAclResponse: + Response message for + DocumentService.FetchAcl. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/documents/*}:fetchAcl", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*}:fetchAcl", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_fetch_acl(request, metadata) + pb_request = document_service_request.FetchAclRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.FetchAclResponse() + pb_resp = document_service.FetchAclResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_acl(resp) + return resp + + class _GetDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("GetDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.GetDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcc_document.Document: + r"""Call the get document method over HTTP. + + Args: + request (~.document_service_request.GetDocumentRequest): + The request object. Request message for + DocumentService.GetDocument. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcc_document.Document: + Defines the structure for content + warehouse document proto. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/documents/*}:get", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/documents/referenceId/*}:get", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_get_document(request, metadata) + pb_request = document_service_request.GetDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcc_document.Document() + pb_resp = gcc_document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_document(resp) + return resp + + class _LockDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("LockDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.LockDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcc_document.Document: + r"""Call the lock document method over HTTP. + + Args: + request (~.document_service_request.LockDocumentRequest): + The request object. Request message for + DocumentService.LockDocument. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcc_document.Document: + Defines the structure for content + warehouse document proto. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/documents/*}:lock", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_lock_document(request, metadata) + pb_request = document_service_request.LockDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcc_document.Document() + pb_resp = gcc_document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lock_document(resp) + return resp + + class _SearchDocuments(DocumentServiceRestStub): + def __hash__(self): + return hash("SearchDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.SearchDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.SearchDocumentsResponse: + r"""Call the search documents method over HTTP. + + Args: + request (~.document_service_request.SearchDocumentsRequest): + The request object. Request message for + DocumentService.SearchDocuments. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.SearchDocumentsResponse: + Response message for + DocumentService.SearchDocuments. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/documents:search", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_search_documents( + request, metadata + ) + pb_request = document_service_request.SearchDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.SearchDocumentsResponse() + pb_resp = document_service.SearchDocumentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_documents(resp) + return resp + + class _SetAcl(DocumentServiceRestStub): + def __hash__(self): + return hash("SetAcl") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.SetAclRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.SetAclResponse: + r"""Call the set acl method over HTTP. + + Args: + request (~.document_service_request.SetAclRequest): + The request object. Request message for + DocumentService.SetAcl. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.SetAclResponse: + Response message for + DocumentService.SetAcl. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/documents/*}:setAcl", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*}:setAcl", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_acl(request, metadata) + pb_request = document_service_request.SetAclRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.SetAclResponse() + pb_resp = document_service.SetAclResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_acl(resp) + return resp + + class _UpdateDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("UpdateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service_request.UpdateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.UpdateDocumentResponse: + r"""Call the update document method over HTTP. + + Args: + request (~.document_service_request.UpdateDocumentRequest): + The request object. Request message for + DocumentService.UpdateDocument. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.UpdateDocumentResponse: + Response message for + DocumentService.UpdateDocument. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{name=projects/*/locations/*/documents/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v1/{name=projects/*/locations/*/documents/referenceId/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_document(request, metadata) + pb_request = document_service_request.UpdateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.UpdateDocumentResponse() + pb_resp = document_service.UpdateDocumentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_document(resp) + return resp + + @property + def create_document( + self, + ) -> Callable[ + [document_service_request.CreateDocumentRequest], + document_service.CreateDocumentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_document( + self, + ) -> Callable[[document_service_request.DeleteDocumentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_acl( + self, + ) -> Callable[ + [document_service_request.FetchAclRequest], document_service.FetchAclResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchAcl(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_document( + self, + ) -> Callable[[document_service_request.GetDocumentRequest], gcc_document.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def lock_document( + self, + ) -> Callable[ + [document_service_request.LockDocumentRequest], gcc_document.Document + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LockDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_documents( + self, + ) -> Callable[ + [document_service_request.SearchDocumentsRequest], + document_service.SearchDocumentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_acl( + self, + ) -> Callable[ + [document_service_request.SetAclRequest], document_service.SetAclResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetAcl(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_document( + self, + ) -> Callable[ + [document_service_request.UpdateDocumentRequest], + document_service.UpdateDocumentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DocumentServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
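+            # For example, an HTTP 404 from the service surfaces as
+            # google.api_core.exceptions.NotFound, and an HTTP 403 as Forbidden.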
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DocumentServiceRestTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py index f04e7b1a7135..7a72a0d97cc6 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py @@ -778,6 +778,60 @@ async def sample_list_rule_sets(): # Done; return the response. return response + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py index 713ac3ad6d32..a80a91ad7b17 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py @@ -54,6 +54,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, RuleSetServiceTransport from .transports.grpc import RuleSetServiceGrpcTransport from .transports.grpc_asyncio import RuleSetServiceGrpcAsyncIOTransport +from .transports.rest import RuleSetServiceRestTransport class RuleSetServiceClientMeta(type): @@ -69,6 +70,7 @@ class RuleSetServiceClientMeta(type): ) # type: Dict[str, Type[RuleSetServiceTransport]] _transport_registry["grpc"] = RuleSetServiceGrpcTransport _transport_registry["grpc_asyncio"] = RuleSetServiceGrpcAsyncIOTransport + _transport_registry["rest"] = RuleSetServiceRestTransport def get_transport_class( cls, @@ -1035,6 +1037,60 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/__init__.py index 0b8bed110eaf..33e619d62798 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import RuleSetServiceTransport from .grpc import RuleSetServiceGrpcTransport from .grpc_asyncio import RuleSetServiceGrpcAsyncIOTransport +from .rest import RuleSetServiceRestInterceptor, RuleSetServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[RuleSetServiceTransport]] _transport_registry["grpc"] = RuleSetServiceGrpcTransport _transport_registry["grpc_asyncio"] = RuleSetServiceGrpcAsyncIOTransport +_transport_registry["rest"] = RuleSetServiceRestTransport __all__ = ( "RuleSetServiceTransport", "RuleSetServiceGrpcTransport", "RuleSetServiceGrpcAsyncIOTransport", + "RuleSetServiceRestTransport", + "RuleSetServiceRestInterceptor", ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py index bdf24d1b50aa..07aa8d44dc68 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py @@ -226,6 +226,15 @@ def list_rule_sets( ]: raise NotImplementedError() + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py index 519c9a5a6370..c90a894fa07d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py @@ -368,6 +368,23 @@ def list_rule_sets( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
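+        # The stub is created lazily on first access and cached in
+        # ``self._stubs`` so later accesses reuse the same channel callable.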
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py index 4ffbfb25eff7..950e30d5435e 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py @@ -379,5 +379,22 @@ def list_rule_sets( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ("RuleSetServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py new file mode 100644 index 000000000000..4b3708b65bc1 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py @@ -0,0 +1,900 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import rule_engine, ruleset_service_request + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import RuleSetServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RuleSetServiceRestInterceptor: + """Interceptor for RuleSetService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RuleSetServiceRestTransport. + + .. code-block:: python + class MyCustomRuleSetServiceInterceptor(RuleSetServiceRestInterceptor): + def pre_create_rule_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_rule_set(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_rule_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_rule_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rule_set(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_rule_sets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_rule_sets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_rule_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_rule_set(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RuleSetServiceRestTransport(interceptor=MyCustomRuleSetServiceInterceptor()) + client = RuleSetServiceClient(transport=transport) + + + """ + + def pre_create_rule_set( + self, + request: ruleset_service_request.CreateRuleSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ruleset_service_request.CreateRuleSetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_rule_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuleSetService server. 
+ """ + return request, metadata + + def post_create_rule_set( + self, response: rule_engine.RuleSet + ) -> rule_engine.RuleSet: + """Post-rpc interceptor for create_rule_set + + Override in a subclass to manipulate the response + after it is returned by the RuleSetService server but before + it is returned to user code. + """ + return response + + def pre_delete_rule_set( + self, + request: ruleset_service_request.DeleteRuleSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ruleset_service_request.DeleteRuleSetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_rule_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuleSetService server. + """ + return request, metadata + + def pre_get_rule_set( + self, + request: ruleset_service_request.GetRuleSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ruleset_service_request.GetRuleSetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_rule_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuleSetService server. + """ + return request, metadata + + def post_get_rule_set(self, response: rule_engine.RuleSet) -> rule_engine.RuleSet: + """Post-rpc interceptor for get_rule_set + + Override in a subclass to manipulate the response + after it is returned by the RuleSetService server but before + it is returned to user code. + """ + return response + + def pre_list_rule_sets( + self, + request: ruleset_service_request.ListRuleSetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ruleset_service_request.ListRuleSetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_rule_sets + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuleSetService server. + """ + return request, metadata + + def post_list_rule_sets( + self, response: ruleset_service_request.ListRuleSetsResponse + ) -> ruleset_service_request.ListRuleSetsResponse: + """Post-rpc interceptor for list_rule_sets + + Override in a subclass to manipulate the response + after it is returned by the RuleSetService server but before + it is returned to user code. + """ + return response + + def pre_update_rule_set( + self, + request: ruleset_service_request.UpdateRuleSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ruleset_service_request.UpdateRuleSetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_rule_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuleSetService server. + """ + return request, metadata + + def post_update_rule_set( + self, response: rule_engine.RuleSet + ) -> rule_engine.RuleSet: + """Post-rpc interceptor for update_rule_set + + Override in a subclass to manipulate the response + after it is returned by the RuleSetService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RuleSetService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the RuleSetService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RuleSetServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RuleSetServiceRestInterceptor + + +class RuleSetServiceRestTransport(RuleSetServiceTransport): + """REST backend transport for RuleSetService. + + Service to manage customer specific RuleSets. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RuleSetServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RuleSetServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateRuleSet(RuleSetServiceRestStub): + def __hash__(self): + return hash("CreateRuleSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ruleset_service_request.CreateRuleSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rule_engine.RuleSet: + r"""Call the create rule set method over HTTP. + + Args: + request (~.ruleset_service_request.CreateRuleSetRequest): + The request object. Request message for + RuleSetService.CreateRuleSet. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.rule_engine.RuleSet: + Represents a set of rules from a + single customer. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/ruleSets", + "body": "rule_set", + }, + ] + request, metadata = self._interceptor.pre_create_rule_set(request, metadata) + pb_request = ruleset_service_request.CreateRuleSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rule_engine.RuleSet() + pb_resp = rule_engine.RuleSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_rule_set(resp) + return resp + + class _DeleteRuleSet(RuleSetServiceRestStub): + def __hash__(self): + return hash("DeleteRuleSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ruleset_service_request.DeleteRuleSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete rule set method over HTTP. + + Args: + request (~.ruleset_service_request.DeleteRuleSetRequest): + The request object. Request message for + RuleSetService.DeleteRuleSet. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/ruleSets/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_rule_set(request, metadata) + pb_request = ruleset_service_request.DeleteRuleSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetRuleSet(RuleSetServiceRestStub): + def __hash__(self): + return hash("GetRuleSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ruleset_service_request.GetRuleSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rule_engine.RuleSet: + r"""Call the get rule set method over HTTP. + + Args: + request (~.ruleset_service_request.GetRuleSetRequest): + The request object. Request message for + RuleSetService.GetRuleSet. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.rule_engine.RuleSet: + Represents a set of rules from a + single customer. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/ruleSets/*}", + }, + ] + request, metadata = self._interceptor.pre_get_rule_set(request, metadata) + pb_request = ruleset_service_request.GetRuleSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rule_engine.RuleSet() + pb_resp = rule_engine.RuleSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rule_set(resp) + return resp + + class _ListRuleSets(RuleSetServiceRestStub): + def __hash__(self): + return hash("ListRuleSets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ruleset_service_request.ListRuleSetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ruleset_service_request.ListRuleSetsResponse: + r"""Call the list rule sets method over HTTP. + + Args: + request (~.ruleset_service_request.ListRuleSetsRequest): + The request object. Request message for + RuleSetService.ListRuleSets. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.ruleset_service_request.ListRuleSetsResponse: + Response message for + RuleSetService.ListRuleSets. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/ruleSets", + }, + ] + request, metadata = self._interceptor.pre_list_rule_sets(request, metadata) + pb_request = ruleset_service_request.ListRuleSetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ruleset_service_request.ListRuleSetsResponse() + pb_resp = ruleset_service_request.ListRuleSetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_rule_sets(resp) + return resp + + class _UpdateRuleSet(RuleSetServiceRestStub): + def __hash__(self): + return hash("UpdateRuleSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ruleset_service_request.UpdateRuleSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rule_engine.RuleSet: + r"""Call the update rule set method over HTTP. + + Args: + request (~.ruleset_service_request.UpdateRuleSetRequest): + The request object. Request message for + RuleSetService.UpdateRuleSet. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.rule_engine.RuleSet: + Represents a set of rules from a + single customer. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{name=projects/*/locations/*/ruleSets/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_rule_set(request, metadata) + pb_request = ruleset_service_request.UpdateRuleSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rule_engine.RuleSet() + pb_resp = rule_engine.RuleSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_rule_set(resp) + return resp + + @property + def create_rule_set( + self, + ) -> Callable[[ruleset_service_request.CreateRuleSetRequest], rule_engine.RuleSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRuleSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_rule_set( + self, + ) -> Callable[[ruleset_service_request.DeleteRuleSetRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRuleSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_rule_set( + self, + ) -> Callable[[ruleset_service_request.GetRuleSetRequest], rule_engine.RuleSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRuleSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_rule_sets( + self, + ) -> Callable[ + [ruleset_service_request.ListRuleSetsRequest], + ruleset_service_request.ListRuleSetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRuleSets(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_rule_set( + self, + ) -> Callable[[ruleset_service_request.UpdateRuleSetRequest], rule_engine.RuleSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateRuleSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(RuleSetServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RuleSetServiceRestTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py index 371322061ace..8a066bf9ae30 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py @@ -388,8 +388,8 @@ async def sample_get_synonym_set(): Args: request (Optional[Union[google.cloud.contentwarehouse_v1.types.GetSynonymSetRequest, dict]]): The request object. Request message for - SynonymSetService.GetSynonymSet. Will return synonymSet - for a certain context. + SynonymSetService.GetSynonymSet. Will + return synonymSet for a certain context. name (:class:`str`): Required. The name of the synonymSet to retrieve Format: projects/{project_number}/locations/{location}/synonymSets/{context}. 
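For orientation, the RuleSetService pieces added above (the REST transport, the interceptor hooks, and the get_operation mixin) compose from user code roughly as sketched below. This is an illustrative sketch only, not part of the diff: the project, location, and operation IDs are placeholders, credentials are resolved from the environment, and the logging hooks stand in for whatever request/response handling a real subclass would do.

import logging

from google.longrunning import operations_pb2

from google.cloud.contentwarehouse_v1.services.rule_set_service import (
    RuleSetServiceClient,
)
from google.cloud.contentwarehouse_v1.services.rule_set_service.transports import (
    RuleSetServiceRestInterceptor,
    RuleSetServiceRestTransport,
)


class LoggingInterceptor(RuleSetServiceRestInterceptor):
    # Only create_rule_set is intercepted here; every other RPC falls back
    # to the default no-op hooks on RuleSetServiceRestInterceptor.
    def pre_create_rule_set(self, request, metadata):
        logging.info("CreateRuleSet request: %s", request)
        return request, metadata

    def post_create_rule_set(self, response):
        logging.info("CreateRuleSet response: %s", response)
        return response


# Route RPCs over HTTP/1.1+JSON instead of gRPC; credentials come from
# Application Default Credentials since none are passed explicitly.
transport = RuleSetServiceRestTransport(interceptor=LoggingInterceptor())
client = RuleSetServiceClient(transport=transport)

# Poll a long-running operation through the newly exposed mixin. The name
# below is a placeholder matching the
# "/v1/{name=projects/*/locations/*/operations/*}" HTTP rule above.
operation = client.get_operation(
    operations_pb2.GetOperationRequest(
        name="projects/my-project/locations/us/operations/my-operation"
    )
)
print(operation.done)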
@@ -511,9 +511,10 @@ async def sample_update_synonym_set(): Args: request (Optional[Union[google.cloud.contentwarehouse_v1.types.UpdateSynonymSetRequest, dict]]): The request object. Request message for - SynonymSetService.UpdateSynonymSet. Removes the - SynonymSet for the specified context and replaces it - with the SynonymSet in this request. + SynonymSetService.UpdateSynonymSet. + Removes the SynonymSet for the specified + context and replaces it with the + SynonymSet in this request. name (:class:`str`): Required. The name of the synonymSet to update Format: projects/{project_number}/locations/{location}/synonymSets/{context}. @@ -727,8 +728,9 @@ async def sample_list_synonym_sets(): Args: request (Optional[Union[google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest, dict]]): The request object. Request message for - SynonymSetService.ListSynonymSets. Will return all - synonymSets belonging to the customer project. + SynonymSetService.ListSynonymSets. Will + return all synonymSets belonging to the + customer project. parent (:class:`str`): Required. The parent name. Format: projects/{project_number}/locations/{location}. @@ -811,6 +813,60 @@ async def sample_list_synonym_sets(): # Done; return the response. return response + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py index 1583a7828955..b190c0f46f81 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py @@ -57,6 +57,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, SynonymSetServiceTransport from .transports.grpc import SynonymSetServiceGrpcTransport from .transports.grpc_asyncio import SynonymSetServiceGrpcAsyncIOTransport +from .transports.rest import SynonymSetServiceRestTransport class SynonymSetServiceClientMeta(type): @@ -72,6 +73,7 @@ class SynonymSetServiceClientMeta(type): ) # type: Dict[str, Type[SynonymSetServiceTransport]] _transport_registry["grpc"] = SynonymSetServiceGrpcTransport _transport_registry["grpc_asyncio"] = SynonymSetServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SynonymSetServiceRestTransport def get_transport_class( cls, @@ -628,8 +630,8 @@ def sample_get_synonym_set(): Args: request (Union[google.cloud.contentwarehouse_v1.types.GetSynonymSetRequest, dict]): The request object. Request message for - SynonymSetService.GetSynonymSet. Will return synonymSet - for a certain context. + SynonymSetService.GetSynonymSet. Will + return synonymSet for a certain context. name (str): Required. The name of the synonymSet to retrieve Format: projects/{project_number}/locations/{location}/synonymSets/{context}. @@ -742,9 +744,10 @@ def sample_update_synonym_set(): Args: request (Union[google.cloud.contentwarehouse_v1.types.UpdateSynonymSetRequest, dict]): The request object. Request message for - SynonymSetService.UpdateSynonymSet. Removes the - SynonymSet for the specified context and replaces it - with the SynonymSet in this request. + SynonymSetService.UpdateSynonymSet. + Removes the SynonymSet for the specified + context and replaces it with the + SynonymSet in this request. name (str): Required. The name of the synonymSet to update Format: projects/{project_number}/locations/{location}/synonymSets/{context}. @@ -958,8 +961,9 @@ def sample_list_synonym_sets(): Args: request (Union[google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest, dict]): The request object. Request message for - SynonymSetService.ListSynonymSets. Will return all - synonymSets belonging to the customer project. + SynonymSetService.ListSynonymSets. Will + return all synonymSets belonging to the + customer project. parent (str): Required. The parent name. Format: projects/{project_number}/locations/{location}. @@ -1046,6 +1050,60 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/__init__.py index 040ad141280d..4b0117751b8b 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import SynonymSetServiceTransport from .grpc import SynonymSetServiceGrpcTransport from .grpc_asyncio import SynonymSetServiceGrpcAsyncIOTransport +from .rest import SynonymSetServiceRestInterceptor, SynonymSetServiceRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[SynonymSetServiceTransport]] _transport_registry["grpc"] = SynonymSetServiceGrpcTransport _transport_registry["grpc_asyncio"] = SynonymSetServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SynonymSetServiceRestTransport __all__ = ( "SynonymSetServiceTransport", "SynonymSetServiceGrpcTransport", "SynonymSetServiceGrpcAsyncIOTransport", + "SynonymSetServiceRestTransport", + "SynonymSetServiceRestInterceptor", ) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py index 4b4fda18a364..551f579b38a3 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py @@ -229,6 +229,15 @@ def list_synonym_sets( ]: raise NotImplementedError() + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py index af4734dc9c87..3cc74310fac2 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py @@ -384,6 +384,23 @@ def list_synonym_sets( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py index a3da90f97367..1783c3ba6c0d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py @@ -390,5 +390,22 @@ def list_synonym_sets( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ("SynonymSetServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py new file mode 100644 index 000000000000..79e73ed8f452 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py @@ -0,0 +1,955 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import ( + synonymset, + synonymset_service_request, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SynonymSetServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SynonymSetServiceRestInterceptor: + """Interceptor for SynonymSetService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SynonymSetServiceRestTransport. + +
.. code-block:: python + class MyCustomSynonymSetServiceInterceptor(SynonymSetServiceRestInterceptor): + def pre_create_synonym_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_synonym_set(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_synonym_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_synonym_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_synonym_set(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_synonym_sets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_synonym_sets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_synonym_set(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_synonym_set(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SynonymSetServiceRestTransport(interceptor=MyCustomSynonymSetServiceInterceptor()) + client = SynonymSetServiceClient(transport=transport) + + + """ + + def pre_create_synonym_set( + self, + request: synonymset_service_request.CreateSynonymSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + synonymset_service_request.CreateSynonymSetRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_synonym_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the SynonymSetService server. + """ + return request, metadata + + def post_create_synonym_set( + self, response: synonymset.SynonymSet + ) -> synonymset.SynonymSet: + """Post-rpc interceptor for create_synonym_set + + Override in a subclass to manipulate the response + after it is returned by the SynonymSetService server but before + it is returned to user code. + """ + return response + + def pre_delete_synonym_set( + self, + request: synonymset_service_request.DeleteSynonymSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + synonymset_service_request.DeleteSynonymSetRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_synonym_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the SynonymSetService server. + """ + return request, metadata + + def pre_get_synonym_set( + self, + request: synonymset_service_request.GetSynonymSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + synonymset_service_request.GetSynonymSetRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_synonym_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the SynonymSetService server. + """ + return request, metadata + + def post_get_synonym_set( + self, response: synonymset.SynonymSet + ) -> synonymset.SynonymSet: + """Post-rpc interceptor for get_synonym_set + + Override in a subclass to manipulate the response + after it is returned by the SynonymSetService server but before + it is returned to user code.
+ """ + return response + + def pre_list_synonym_sets( + self, + request: synonymset_service_request.ListSynonymSetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + synonymset_service_request.ListSynonymSetsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_synonym_sets + + Override in a subclass to manipulate the request or metadata + before they are sent to the SynonymSetService server. + """ + return request, metadata + + def post_list_synonym_sets( + self, response: synonymset_service_request.ListSynonymSetsResponse + ) -> synonymset_service_request.ListSynonymSetsResponse: + """Post-rpc interceptor for list_synonym_sets + + Override in a subclass to manipulate the response + after it is returned by the SynonymSetService server but before + it is returned to user code. + """ + return response + + def pre_update_synonym_set( + self, + request: synonymset_service_request.UpdateSynonymSetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + synonymset_service_request.UpdateSynonymSetRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_synonym_set + + Override in a subclass to manipulate the request or metadata + before they are sent to the SynonymSetService server. + """ + return request, metadata + + def post_update_synonym_set( + self, response: synonymset.SynonymSet + ) -> synonymset.SynonymSet: + """Post-rpc interceptor for update_synonym_set + + Override in a subclass to manipulate the response + after it is returned by the SynonymSetService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SynonymSetService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SynonymSetService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SynonymSetServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SynonymSetServiceRestInterceptor + + +class SynonymSetServiceRestTransport(SynonymSetServiceTransport): + """REST backend transport for SynonymSetService. + + A Service that manage/custom customer specified SynonymSets. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SynonymSetServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SynonymSetServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateSynonymSet(SynonymSetServiceRestStub): + def __hash__(self): + return hash("CreateSynonymSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: synonymset_service_request.CreateSynonymSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> synonymset.SynonymSet: + r"""Call the create synonym set method over HTTP. + + Args: + request (~.synonymset_service_request.CreateSynonymSetRequest): + The request object. Request message for + SynonymSetService.CreateSynonymSet. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata.
+ + Returns: + ~.synonymset.SynonymSet: + Represents a list of synonyms for a + given context. For example a context + "sales" could contain: Synonym 1: sale, + invoice, bill, order + Synonym 2: money, credit, finance, + payment Synonym 3: shipping, freight, + transport + Each SynonymSets should be disjoint + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/synonymSets", + "body": "synonym_set", + }, + ] + request, metadata = self._interceptor.pre_create_synonym_set( + request, metadata + ) + pb_request = synonymset_service_request.CreateSynonymSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = synonymset.SynonymSet() + pb_resp = synonymset.SynonymSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_synonym_set(resp) + return resp + + class _DeleteSynonymSet(SynonymSetServiceRestStub): + def __hash__(self): + return hash("DeleteSynonymSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: synonymset_service_request.DeleteSynonymSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete synonym set method over HTTP. + + Args: + request (~.synonymset_service_request.DeleteSynonymSetRequest): + The request object. Request message for + SynonymSetService.DeleteSynonymSet. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/synonymSets/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_synonym_set( + request, metadata + ) + pb_request = synonymset_service_request.DeleteSynonymSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetSynonymSet(SynonymSetServiceRestStub): + def __hash__(self): + return hash("GetSynonymSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: synonymset_service_request.GetSynonymSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> synonymset.SynonymSet: + r"""Call the get synonym set method over HTTP. + + Args: + request (~.synonymset_service_request.GetSynonymSetRequest): + The request object. Request message for + SynonymSetService.GetSynonymSet. Will + return synonymSet for a certain context. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.synonymset.SynonymSet: + Represents a list of synonyms for a + given context. 
For example a context + "sales" could contain: Synonym 1: sale, + invoice, bill, order + Synonym 2: money, credit, finance, + payment Synonym 3: shipping, freight, + transport + Each SynonymSets should be disjoint + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/synonymSets/*}", + }, + ] + request, metadata = self._interceptor.pre_get_synonym_set(request, metadata) + pb_request = synonymset_service_request.GetSynonymSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = synonymset.SynonymSet() + pb_resp = synonymset.SynonymSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_synonym_set(resp) + return resp + + class _ListSynonymSets(SynonymSetServiceRestStub): + def __hash__(self): + return hash("ListSynonymSets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: synonymset_service_request.ListSynonymSetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> synonymset_service_request.ListSynonymSetsResponse: + r"""Call the list synonym sets method over HTTP. + + Args: + request (~.synonymset_service_request.ListSynonymSetsRequest): + The request object. Request message for + SynonymSetService.ListSynonymSets. Will + return all synonymSets belonging to the + customer project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.synonymset_service_request.ListSynonymSetsResponse: + Response message for + SynonymSetService.ListSynonymSets. 
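# A small sketch of the transcoding step used by each stub above:
# google.api_core.path_template.transcode maps request fields onto the URI
# template and moves the rest into query parameters. The resource name is a
# placeholder; the expected output shape is an assumption based on how the
# surrounding generated code consumes it.
from google.api_core import path_template

http_options = [
    {"method": "get", "uri": "/v1/{name=projects/*/locations/*/synonymSets/*}"},
]
transcoded = path_template.transcode(
    http_options, name="projects/p/locations/us/synonymSets/sales"
)
# transcoded should now look roughly like:
# {"method": "get",
#  "uri": "/v1/projects/p/locations/us/synonymSets/sales",
#  "query_params": {}}
print(transcoded["method"], transcoded["uri"])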
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/synonymSets", + }, + ] + request, metadata = self._interceptor.pre_list_synonym_sets( + request, metadata + ) + pb_request = synonymset_service_request.ListSynonymSetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = synonymset_service_request.ListSynonymSetsResponse() + pb_resp = synonymset_service_request.ListSynonymSetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_synonym_sets(resp) + return resp + + class _UpdateSynonymSet(SynonymSetServiceRestStub): + def __hash__(self): + return hash("UpdateSynonymSet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: synonymset_service_request.UpdateSynonymSetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> synonymset.SynonymSet: + r"""Call the update synonym set method over HTTP. + + Args: + request (~.synonymset_service_request.UpdateSynonymSetRequest): + The request object. Request message for + SynonymSetService.UpdateSynonymSet. + Removes the SynonymSet for the specified + context and replaces it with the + SynonymSet in this request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.synonymset.SynonymSet: + Represents a list of synonyms for a + given context. 
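# A hedged sketch of consuming the list RPC wired up above: each HTTP call
# returns one ListSynonymSetsResponse, and the generated pager follows
# next_page_token across calls. The client surface and parent value are
# assumptions for illustration.
from google.cloud import contentwarehouse_v1


def print_all_synonym_sets(parent: str = "projects/123/locations/us"):
    client = contentwarehouse_v1.SynonymSetServiceClient(transport="rest")
    # Iterating the pager transparently issues
    # GET /v1/{parent=projects/*/locations/*}/synonymSets per page.
    for synonym_set in client.list_synonym_sets(
        request=contentwarehouse_v1.ListSynonymSetsRequest(parent=parent)
    ):
        print(synonym_set.name, synonym_set.context)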
For example a context + "sales" could contain: Synonym 1: sale, + invoice, bill, order + Synonym 2: money, credit, finance, + payment Synonym 3: shipping, freight, + transport + Each SynonymSets should be disjoint + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{name=projects/*/locations/*/synonymSets/*}", + "body": "synonym_set", + }, + ] + request, metadata = self._interceptor.pre_update_synonym_set( + request, metadata + ) + pb_request = synonymset_service_request.UpdateSynonymSetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = synonymset.SynonymSet() + pb_resp = synonymset.SynonymSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_synonym_set(resp) + return resp + + @property + def create_synonym_set( + self, + ) -> Callable[ + [synonymset_service_request.CreateSynonymSetRequest], synonymset.SynonymSet + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSynonymSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_synonym_set( + self, + ) -> Callable[ + [synonymset_service_request.DeleteSynonymSetRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSynonymSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_synonym_set( + self, + ) -> Callable[ + [synonymset_service_request.GetSynonymSetRequest], synonymset.SynonymSet + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSynonymSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_synonym_sets( + self, + ) -> Callable[ + [synonymset_service_request.ListSynonymSetsRequest], + synonymset_service_request.ListSynonymSetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSynonymSets(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_synonym_set( + self, + ) -> Callable[ + [synonymset_service_request.UpdateSynonymSetRequest], synonymset.SynonymSet + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSynonymSet(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SynonymSetServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
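# A hedged sketch of customizing the interceptor that the property accessors
# above thread through every stub. The module path follows the standard
# generated layout and is an assumption; the hook names match those used in
# this file.
import logging

from google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.rest import (
    SynonymSetServiceRestInterceptor,
)


class LoggingInterceptor(SynonymSetServiceRestInterceptor):
    def pre_create_synonym_set(self, request, metadata):
        logging.info("Creating synonym set under %s", request.parent)
        return request, metadata

    def post_create_synonym_set(self, response):
        logging.info("Created synonym set %s", response.name)
        return response


# The interceptor is handed to the transport constructor shown earlier:
# SynonymSetServiceRestTransport(..., interceptor=LoggingInterceptor())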
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SynonymSetServiceRestTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py index 9830887373b6..fef2b5e5e1a3 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py @@ -20,6 +20,7 @@ from .common import ( AccessControlMode, DatabaseType, + DocumentCreatorDefaultRole, MergeFieldsOptions, RequestMetadata, ResponseMetadata, @@ -28,6 +29,7 @@ UserInfo, ) from .document import ( + ContentCategory, DateTimeArray, Document, DocumentReference, @@ -87,11 +89,19 @@ DeleteDocumentRequest, FetchAclRequest, GetDocumentRequest, + LockDocumentRequest, SearchDocumentsRequest, SetAclRequest, UpdateDocumentRequest, ) -from .filters import DocumentQuery, FileTypeFilter, PropertyFilter, TimeFilter +from .filters import ( + CustomWeightsMetadata, + DocumentQuery, + FileTypeFilter, + PropertyFilter, + TimeFilter, + WeightedSchemaProperty, +) from .histogram import ( HistogramQuery, HistogramQueryPropertyNameFilter, @@ -143,6 +153,7 @@ "UserInfo", "AccessControlMode", "DatabaseType", + "DocumentCreatorDefaultRole", "UpdateType", "DateTimeArray", "Document", @@ -158,6 +169,7 @@ "TimestampArray", "TimestampValue", "Value", + "ContentCategory", "RawDocumentFileType", "CreateDocumentLinkRequest", "DeleteDocumentLinkRequest", @@ -193,13 +205,16 @@ "DeleteDocumentRequest", "FetchAclRequest", "GetDocumentRequest", + "LockDocumentRequest", "SearchDocumentsRequest", "SetAclRequest", "UpdateDocumentRequest", + "CustomWeightsMetadata", "DocumentQuery", "FileTypeFilter", "PropertyFilter", "TimeFilter", + "WeightedSchemaProperty", "HistogramQuery", "HistogramQueryPropertyNameFilter", "HistogramQueryResult", diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/async_document_service_request.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/async_document_service_request.py index 951f5aa55b44..0190ceedcc7e 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/async_document_service_request.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/async_document_service_request.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
 #
+from __future__ import annotations
+
 from typing import MutableMapping, MutableSequence
 
 import proto  # type: ignore
diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/common.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/common.py
index 844030cca5ed..638b3d8d4f00 100644
--- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/common.py
+++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/common.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from __future__ import annotations
+
 from typing import MutableMapping, MutableSequence
 
 from google.protobuf import field_mask_pb2  # type: ignore
@@ -24,6 +26,7 @@
         "UpdateType",
         "DatabaseType",
         "AccessControlMode",
+        "DocumentCreatorDefaultRole",
         "RequestMetadata",
         "ResponseMetadata",
         "UserInfo",
@@ -40,7 +43,8 @@ class UpdateType(proto.Enum):
         UPDATE_TYPE_UNSPECIFIED (0):
             Defaults to full replace behavior, ie. FULL_REPLACE.
         UPDATE_TYPE_REPLACE (1):
-            Fully replace all the fields. Any field masks
+            Fully replace all the fields (including
+            previously linked raw document). Any field masks
             will be ignored.
         UPDATE_TYPE_MERGE (2):
             Merge the fields into the existing entities.
@@ -50,6 +54,12 @@ class UpdateType(proto.Enum):
             Replace the properties by names.
         UPDATE_TYPE_DELETE_PROPERTIES_BY_NAMES (5):
             Delete the properties by names.
+        UPDATE_TYPE_MERGE_AND_REPLACE_OR_INSERT_PROPERTIES_BY_NAMES (6):
+            For each property, replaces the property if
+            it exists; otherwise inserts a new property. The
+            rest of the fields are merged based on the
+            update mask and the merge fields options.
     """
     UPDATE_TYPE_UNSPECIFIED = 0
     UPDATE_TYPE_REPLACE = 1
@@ -57,6 +67,7 @@ class UpdateType(proto.Enum):
     UPDATE_TYPE_INSERT_PROPERTIES_BY_NAMES = 3
     UPDATE_TYPE_REPLACE_PROPERTIES_BY_NAMES = 4
     UPDATE_TYPE_DELETE_PROPERTIES_BY_NAMES = 5
+    UPDATE_TYPE_MERGE_AND_REPLACE_OR_INSERT_PROPERTIES_BY_NAMES = 6
 
 
 class DatabaseType(proto.Enum):
@@ -99,6 +110,29 @@ class AccessControlMode(proto.Enum):
     ACL_MODE_DOCUMENT_LEVEL_ACCESS_CONTROL_GCI = 3
 
 
+class DocumentCreatorDefaultRole(proto.Enum):
+    r"""The default role of the document creator.
+
+    Values:
+        DOCUMENT_CREATOR_DEFAULT_ROLE_UNSPECIFIED (0):
+            Unspecified; defaults to the document
+            admin role.
+        DOCUMENT_ADMIN (1):
+            Document Admin, same as
+            contentwarehouse.googleapis.com/documentAdmin.
+        DOCUMENT_EDITOR (2):
+            Document Editor, same as
+            contentwarehouse.googleapis.com/documentEditor.
+        DOCUMENT_VIEWER (3):
+            Document Viewer, same as
+            contentwarehouse.googleapis.com/documentViewer.
+    """
+    DOCUMENT_CREATOR_DEFAULT_ROLE_UNSPECIFIED = 0
+    DOCUMENT_ADMIN = 1
+    DOCUMENT_EDITOR = 2
+    DOCUMENT_VIEWER = 3
+
+
 class RequestMetadata(proto.Message):
     r"""Meta information is used to improve the performance of the
     service.
@@ -133,7 +167,7 @@ class ResponseMetadata(proto.Message):
 
 
 class UserInfo(proto.Message):
-    r"""
+    r"""The user information.
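# A hedged sketch of selecting the new merge-or-insert update type added
# above; the UpdateOptions/UpdateDocumentRequest shapes are assumed from
# this package's public types, and the document name is a placeholder.
from google.cloud import contentwarehouse_v1

request = contentwarehouse_v1.UpdateDocumentRequest(
    name="projects/123/locations/us/documents/doc-1",
    document=contentwarehouse_v1.Document(display_name="updated title"),
    update_options=contentwarehouse_v1.UpdateOptions(
        update_type=contentwarehouse_v1.UpdateType.UPDATE_TYPE_MERGE_AND_REPLACE_OR_INSERT_PROPERTIES_BY_NAMES,
    ),
)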
Attributes: id (str): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py index bc971f675aab..13fe49ce2feb 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py @@ -13,9 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence -import google.cloud.documentai_v1.types +from google.cloud.documentai_v1.types import document as gcd_document from google.protobuf import timestamp_pb2 # type: ignore from google.type import datetime_pb2 # type: ignore import proto # type: ignore @@ -24,6 +26,7 @@ package="google.cloud.contentwarehouse.v1", manifest={ "RawDocumentFileType", + "ContentCategory", "Document", "DocumentReference", "Property", @@ -60,6 +63,8 @@ class RawDocumentFileType(proto.Enum): Microsoft Powerpoint format RAW_DOCUMENT_FILE_TYPE_TEXT (5): UTF-8 encoded text format + RAW_DOCUMENT_FILE_TYPE_TIFF (6): + TIFF or TIF image file format """ RAW_DOCUMENT_FILE_TYPE_UNSPECIFIED = 0 RAW_DOCUMENT_FILE_TYPE_PDF = 1 @@ -67,6 +72,27 @@ class RawDocumentFileType(proto.Enum): RAW_DOCUMENT_FILE_TYPE_XLSX = 3 RAW_DOCUMENT_FILE_TYPE_PPTX = 4 RAW_DOCUMENT_FILE_TYPE_TEXT = 5 + RAW_DOCUMENT_FILE_TYPE_TIFF = 6 + + +class ContentCategory(proto.Enum): + r"""When a raw document or structured content is supplied, this + stores the content category. + + Values: + CONTENT_CATEGORY_UNSPECIFIED (0): + No category is specified. + CONTENT_CATEGORY_IMAGE (1): + Content is of image type. + CONTENT_CATEGORY_AUDIO (2): + Content is of audio type. + CONTENT_CATEGORY_VIDEO (3): + Content is of video type. + """ + CONTENT_CATEGORY_UNSPECIFIED = 0 + CONTENT_CATEGORY_IMAGE = 1 + CONTENT_CATEGORY_AUDIO = 2 + CONTENT_CATEGORY_VIDEO = 3 class Document(proto.Message): @@ -97,9 +123,8 @@ class Document(proto.Message): the top heading in the document. title (str): Title that describes the document. - This is usually present in the top section of - the document, and is a mandatory field for the - question-answering feature. + This can be the top heading or text that + describes the document. display_uri (str): Uri to display the document, for example, in the UI. @@ -143,9 +168,14 @@ class Document(proto.Message): async_enabled (bool): If true, makes the document visible to asynchronous policies and rules. + content_category (google.cloud.contentwarehouse_v1.types.ContentCategory): + Indicates the category (image, audio, video + etc.) of the original content. text_extraction_disabled (bool): If true, text extraction will not be performed. + text_extraction_enabled (bool): + If true, text extraction will be performed. creator (str): The user who creates the document. 
updater (str): @@ -181,11 +211,11 @@ class Document(proto.Message): number=15, oneof="structured_content", ) - cloud_ai_document: google.cloud.documentai_v1.types.Document = proto.Field( + cloud_ai_document: gcd_document.Document = proto.Field( proto.MESSAGE, number=4, oneof="structured_content", - message=google.cloud.documentai_v1.types.Document, + message=gcd_document.Document, ) structured_content_uri: str = proto.Field( proto.STRING, @@ -225,10 +255,19 @@ class Document(proto.Message): proto.BOOL, number=12, ) + content_category: "ContentCategory" = proto.Field( + proto.ENUM, + number=20, + enum="ContentCategory", + ) text_extraction_disabled: bool = proto.Field( proto.BOOL, number=19, ) + text_extraction_enabled: bool = proto.Field( + proto.BOOL, + number=21, + ) creator: str = proto.Field( proto.STRING, number=13, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py index aa68700dce98..e3832ad5a3c9 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema.py index a83d00d5f05e..6bbbabc809d9 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore @@ -46,7 +48,7 @@ class DocumentSchema(proto.Message): The name is ignored when creating a document schema. display_name (str): Required. Name of the schema given by the - user. Must be unique per customer. + user. Must be unique per project. property_definitions (MutableSequence[google.cloud.contentwarehouse_v1.types.PropertyDefinition]): Document details. document_is_folder (bool): @@ -126,12 +128,18 @@ class PropertyDefinition(proto.Message): included in a global search. is_metadata (bool): Whether the property is user supplied - metadata. + metadata. This out-of-the box placeholder + setting can be used to tag derived properties. + Its value and interpretation logic should be + implemented by API user. is_required (bool): Whether the property is mandatory. Default is 'false', i.e. populating property value can be skipped. If 'true' then user must populate the value for this property. + retrieval_importance (google.cloud.contentwarehouse_v1.types.PropertyDefinition.RetrievalImportance): + The retrieval importance of the property + during search. integer_type_options (google.cloud.contentwarehouse_v1.types.IntegerTypeOptions): Integer property. @@ -168,8 +176,58 @@ class PropertyDefinition(proto.Message): deployment. 
This field is a member of `oneof`_ ``value_type_options``. + schema_sources (MutableSequence[google.cloud.contentwarehouse_v1.types.PropertyDefinition.SchemaSource]): + The mapping information between this property + to another schema source. """ + class RetrievalImportance(proto.Enum): + r"""Stores the retrieval importance. + + Values: + RETRIEVAL_IMPORTANCE_UNSPECIFIED (0): + No importance specified. Default medium + importance. + HIGHEST (1): + Highest importance. + HIGHER (2): + Higher importance. + HIGH (3): + High importance. + MEDIUM (4): + Medium importance. + LOW (5): + Low importance (negative). + LOWEST (6): + Lowest importance (negative). + """ + RETRIEVAL_IMPORTANCE_UNSPECIFIED = 0 + HIGHEST = 1 + HIGHER = 2 + HIGH = 3 + MEDIUM = 4 + LOW = 5 + LOWEST = 6 + + class SchemaSource(proto.Message): + r"""The schema source information. + + Attributes: + name (str): + The schema name in the source. + processor_type (str): + The Doc AI processor type name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + processor_type: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -198,6 +256,11 @@ class PropertyDefinition(proto.Message): proto.BOOL, number=14, ) + retrieval_importance: RetrievalImportance = proto.Field( + proto.ENUM, + number=18, + enum=RetrievalImportance, + ) integer_type_options: "IntegerTypeOptions" = proto.Field( proto.MESSAGE, number=7, @@ -246,6 +309,11 @@ class PropertyDefinition(proto.Message): oneof="value_type_options", message="TimestampTypeOptions", ) + schema_sources: MutableSequence[SchemaSource] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message=SchemaSource, + ) class IntegerTypeOptions(proto.Message): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema_service.py index f6412fb8393b..37e829776546 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_schema_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py index 28253fe77f24..cb8eb09f5111 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py @@ -13,9 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import proto # type: ignore from google.cloud.contentwarehouse_v1.types import common @@ -51,6 +54,8 @@ class CreateDocumentResponse(proto.Message): metadata (google.cloud.contentwarehouse_v1.types.ResponseMetadata): Additional information for the API invocation, such as the request tracking id. 
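# A hedged sketch of a schema property using the new retrieval_importance
# field introduced above; choosing TextTypeOptions as the value type is an
# assumption for the example.
from google.cloud import contentwarehouse_v1

prop = contentwarehouse_v1.PropertyDefinition(
    name="customer_name",
    display_name="Customer name",
    is_searchable=True,
    retrieval_importance=contentwarehouse_v1.PropertyDefinition.RetrievalImportance.HIGH,
    text_type_options=contentwarehouse_v1.TextTypeOptions(),
)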
+ long_running_operations (MutableSequence[google.longrunning.operations_pb2.Operation]): + post-processing LROs """ document: gcc_document.Document = proto.Field( @@ -68,6 +73,13 @@ class CreateDocumentResponse(proto.Message): number=3, message=common.ResponseMetadata, ) + long_running_operations: MutableSequence[ + operations_pb2.Operation + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=operations_pb2.Operation, + ) class UpdateDocumentResponse(proto.Message): @@ -165,11 +177,12 @@ class SearchDocumentsResponse(proto.Message): The total number of matched documents which is available only if the client set [SearchDocumentsRequest.require_total_size][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.require_total_size] - to ``true``. Otherwise, the value will be ``-1``. - ``total_size`` will max at "100,000". If this is returned, - then it can be assumed that the count is equal to or greater - than 100,000. Typically a UI would handle this condition by - displaying "of many", for example: "Displaying 10 of many". + to ``true`` or set + [SearchDocumentsRequest.total_result_size][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.total_result_size] + to ``ESTIMATED_SIZE`` or ``ACTUAL_SIZE``. Otherwise, the + value will be ``-1``. Typically a UI would handle this + condition by displaying "of many", for example: "Displaying + 10 of many". metadata (google.cloud.contentwarehouse_v1.types.ResponseMetadata): Additional information for the API invocation, such as the request tracking id. diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service_request.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service_request.py index 5681711658cb..3ffa47e2714d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service_request.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service_request.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.iam.v1 import policy_pb2 # type: ignore @@ -32,6 +34,7 @@ "UpdateDocumentRequest", "DeleteDocumentRequest", "SearchDocumentsRequest", + "LockDocumentRequest", "FetchAclRequest", "SetAclRequest", }, @@ -79,16 +82,23 @@ class CreateDocumentRequest(proto.Message): service. policy (google.iam.v1.policy_pb2.Policy): Default document policy during creation. + This refers to an Identity and Access (IAM) + policy, which specifies access controls for the + Document. Conditions defined in the policy will be ignored. cloud_ai_document_option (google.cloud.contentwarehouse_v1.types.CloudAIDocumentOption): Request Option for processing Cloud AI - Document in CW Document. + Document in Document Warehouse. This field + offers limited support for mapping entities from + Cloud AI Document to Warehouse Document. Please + consult with product team before using this + field and other available options. create_mask (google.protobuf.field_mask_pb2.FieldMask): Field mask for creating Document fields. If mask path is empty, it means all fields are masked. For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
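# A hedged sketch reading the new long_running_operations field off a
# CreateDocumentResponse; client construction and the printed fields are
# illustrative only, and the parent value is a placeholder.
from google.cloud import contentwarehouse_v1


def create_and_report(parent: str = "projects/123/locations/us"):
    client = contentwarehouse_v1.DocumentServiceClient()
    response = client.create_document(
        request=contentwarehouse_v1.CreateDocumentRequest(
            parent=parent,
            document=contentwarehouse_v1.Document(display_name="doc"),
        )
    )
    # Post-processing LROs, if any, arrive alongside the created document.
    for operation in response.long_running_operations:
        print(operation.name, operation.done)
    return response.document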
""" parent: str = proto.Field( @@ -165,7 +175,11 @@ class UpdateDocumentRequest(proto.Message): service. cloud_ai_document_option (google.cloud.contentwarehouse_v1.types.CloudAIDocumentOption): Request Option for processing Cloud AI - Document in CW Document. + Document in Document Warehouse. This field + offers limited support for mapping entities from + Cloud AI Document to Warehouse Document. Please + consult with product team before using this + field and other available options. update_options (google.cloud.contentwarehouse_v1.types.UpdateOptions): Options for the update operation. """ @@ -278,6 +292,10 @@ class SearchDocumentsRequest(proto.Message): - ``"upload_date"``: By upload date ascending. - ``"update_date desc"``: By last updated date descending. - ``"update_date"``: By last updated date ascending. + - ``"retrieval_importance desc"``: By retrieval importance + of properties descending. This feature is still under + development, please do not use unless otherwise + instructed to do so. histogram_queries (MutableSequence[google.cloud.contentwarehouse_v1.types.HistogramQuery]): An expression specifying a histogram request against matching documents. Expression syntax is an aggregation @@ -311,8 +329,8 @@ class SearchDocumentsRequest(proto.Message): - For schema id, abc123, get the counts for MORTGAGE_TYPE: count('abc123.MORTGAGE_TYPE') require_total_size (bool): - Optional. Controls if the search document request requires - the return of a total size of matched documents. See + Controls if the search document request requires the return + of a total size of matched documents. See [SearchDocumentsResponse.total_size][google.cloud.contentwarehouse.v1.SearchDocumentsResponse.total_size]. Enabling this flag may adversely impact performance. Hint: @@ -321,6 +339,10 @@ class SearchDocumentsRequest(proto.Message): (keep the total count locally). Defaults to false. + total_result_size (google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest.TotalResultSize): + Controls if the search document request requires the return + of a total size of matched documents. See + [SearchDocumentsResponse.total_size][google.cloud.contentwarehouse.v1.SearchDocumentsResponse.total_size]. qa_size_limit (int): Experimental, do not use. The limit on the number of documents returned for the question-answering feature. To @@ -328,6 +350,24 @@ class SearchDocumentsRequest(proto.Message): [DocumentQuery].[is_nl_query][] to true. """ + class TotalResultSize(proto.Enum): + r"""The total number of matching documents. + + Values: + TOTAL_RESULT_SIZE_UNSPECIFIED (0): + Total number calculation will be skipped. + ESTIMATED_SIZE (1): + Estimate total number. The total result size + will be accurated up to 10,000. This option will + add cost and latency to your request. + ACTUAL_SIZE (2): + It may adversely impact performance. The + limit is 1000,000. + """ + TOTAL_RESULT_SIZE_UNSPECIFIED = 0 + ESTIMATED_SIZE = 1 + ACTUAL_SIZE = 2 + parent: str = proto.Field( proto.STRING, number=1, @@ -367,12 +407,45 @@ class SearchDocumentsRequest(proto.Message): proto.BOOL, number=10, ) + total_result_size: TotalResultSize = proto.Field( + proto.ENUM, + number=12, + enum=TotalResultSize, + ) qa_size_limit: int = proto.Field( proto.INT32, number=11, ) +class LockDocumentRequest(proto.Message): + r"""Request message for DocumentService.LockDocument. + + Attributes: + name (str): + Required. The name of the document to lock. Format: + projects/{project_number}/locations/{location}/documents/{document}. 
+ collection_id (str): + The collection the document connects to. + locking_user (google.cloud.contentwarehouse_v1.types.UserInfo): + The user information who locks the document. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + collection_id: str = proto.Field( + proto.STRING, + number=2, + ) + locking_user: common.UserInfo = proto.Field( + proto.MESSAGE, + number=3, + message=common.UserInfo, + ) + + class FetchAclRequest(proto.Message): r"""Request message for DocumentService.FetchAcl @@ -381,6 +454,8 @@ class FetchAclRequest(proto.Message): Required. REQUIRED: The resource for which the policy is being requested. Format for document: projects/{project_number}/locations/{location}/documents/{document_id}. + Format for collection: + projects/{project_number}/locations/{location}/collections/{collection_id}. Format for project: projects/{project_number}. request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata): The meta information collected about the end @@ -414,11 +489,30 @@ class SetAclRequest(proto.Message): Required. REQUIRED: The resource for which the policy is being requested. Format for document: projects/{project_number}/locations/{location}/documents/{document_id}. + Format for collection: + projects/{project_number}/locations/{location}/collections/{collection_id}. Format for project: projects/{project_number}. policy (google.iam.v1.policy_pb2.Policy): Required. REQUIRED: The complete policy to be applied to the ``resource``. The size of the policy is limited to a few 10s - of KB. + of KB. This refers to an Identity and Access (IAM) policy, + which specifies access controls for the Document. + + You can set ACL with condition for projects only. + + Supported operators are: ``=``, ``!=``, ``<``, ``<=``, + ``>``, and ``>=`` where the left of the operator is + ``DocumentSchemaId`` or property name and the right of the + operator is a number or a quoted string. You must escape + backslash (\) and quote (") characters. + + Boolean expressions (AND/OR) are supported up to 3 levels of + nesting (for example, "((A AND B AND C) OR D) AND E"), a + maximum of 10 comparisons are allowed in the expression. The + expression must be < 6000 bytes in length. + + Sample condition: + ``"DocumentSchemaId = \"some schema id\" OR SchemaId.floatPropertyName >= 10"`` request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata): The meta information collected about the end user, used to enforce access control for the diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py index 51f5348c603a..83c28747ca5e 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.type import interval_pb2 # type: ignore @@ -25,6 +27,8 @@ "TimeFilter", "PropertyFilter", "FileTypeFilter", + "CustomWeightsMetadata", + "WeightedSchemaProperty", }, ) @@ -34,10 +38,53 @@ class DocumentQuery(proto.Message): Attributes: query (str): - The query string that matches against the - full text of the document and the searchable - properties. The maximum number of allowed - characters is 255. 
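# A hedged sketch of SetAcl with a conditional policy on a project,
# following the condition grammar described above; the role, member, and
# expression values are placeholders (the expression reuses the docstring's
# sample condition).
from google.iam.v1 import policy_pb2
from google.type import expr_pb2

from google.cloud import contentwarehouse_v1

set_acl_request = contentwarehouse_v1.SetAclRequest(
    resource="projects/123",
    policy=policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/contentwarehouse.documentViewer",
                members=["user:user@example.com"],
                condition=expr_pb2.Expr(
                    expression='DocumentSchemaId = "some schema id" OR SchemaId.floatPropertyName >= 10'
                ),
            )
        ]
    ),
)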
+            The query string that matches against the full text of the
+            document and the searchable properties.
+
+            The query partially supports `Google AIP style
+            syntax <https://google.aip.dev/160>`__. Specifically, the
+            query supports literals, logical operators, negation
+            operators, comparison operators, and functions.
+
+            Literals: A bare literal value (examples: "42", "Hugo") is a
+            value to be matched against. It searches over the full text
+            of the document and the searchable properties.
+
+            Logical operators: "AND", "and", "OR", and "or" are binary
+            logical operators (example: "engineer OR developer").
+
+            Negation operators: "NOT" and "!" are negation operators
+            (example: "NOT software").
+
+            Comparison operators: the binary comparison operators =, !=,
+            <, >, <=, and >= are supported for string, numeric, enum, and
+            boolean values. The like operator ``~~`` is also supported
+            for strings. It provides semantic search functionality by
+            parsing, stemming, and performing synonym expansion against
+            the input query.
+
+            To specify a property in the query, the left-hand side
+            expression in the comparison must be the property ID
+            including the parent. The right-hand side must be literals.
+            For example: ""projects/123/locations/us".property_a < 1"
+            matches results whose "property_a" is less than 1 in project
+            123 and the us location. The literals and comparison
+            expressions can be connected in a single query (example:
+            "software engineer "projects/123/locations/us".salary > 100").
+
+            Functions: supported functions are
+            ``LOWER([property_name])`` to perform a case-insensitive
+            match and ``EMPTY([property_name])`` to filter on the
+            existence of a key.
+
+            Nested expressions connected using parentheses and logical
+            operators are supported. The default logical operator is
+            ``AND`` if there are no operators between expressions.
+
+            The query can be used with other filters, e.g.
+            ``time_filters`` and ``folder_name_filter``. They are
+            connected with the ``AND`` operator under the hood.
+
+            The maximum number of allowed characters is 255.
         is_nl_query (bool):
             Experimental, do not use. If the query is a natural language
             question. False by default. If true, then the
@@ -122,6 +169,14 @@ class DocumentQuery(proto.Message):
             If multiple values are specified, documents within the
             search results may be associated with any of the specified
             creators.
+        custom_weights_metadata (google.cloud.contentwarehouse_v1.types.CustomWeightsMetadata):
+            To support the custom weighting across
+            document schemas, customers need to provide the
+            properties to be used to boost the ranking in
+            the search request. For a search query with
+            CustomWeightsMetadata specified, only the
+            RetrievalImportance for the properties in the
+            CustomWeightsMetadata will be honored.
     """
 
     query: str = proto.Field(
@@ -167,6 +222,11 @@ class DocumentQuery(proto.Message):
         proto.STRING,
         number=11,
     )
+    custom_weights_metadata: "CustomWeightsMetadata" = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message="CustomWeightsMetadata",
+    )
 
 
 class TimeFilter(proto.Message):
@@ -310,4 +370,43 @@ class FileType(proto.Enum):
     )
 
 
+class CustomWeightsMetadata(proto.Message):
+    r"""To support the custom weighting across document schemas.
+
+    Attributes:
+        weighted_schema_properties (MutableSequence[google.cloud.contentwarehouse_v1.types.WeightedSchemaProperty]):
+            List of schema and property name. Allows a
+            maximum of 10 schemas to be specified for
+            relevance boosting.
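# A few hedged examples of the query syntax documented above, reusing the
# docstring's own samples; the property IDs and project number are
# placeholders.
from google.cloud import contentwarehouse_v1

example_queries = [
    "engineer OR developer",                                    # logical operators
    "NOT software",                                             # negation
    '"projects/123/locations/us".property_a < 1',               # property comparison
    'software engineer "projects/123/locations/us".salary > 100',
]
document_queries = [
    contentwarehouse_v1.DocumentQuery(query=q) for q in example_queries
]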
+ """ + + weighted_schema_properties: MutableSequence[ + "WeightedSchemaProperty" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="WeightedSchemaProperty", + ) + + +class WeightedSchemaProperty(proto.Message): + r"""Specifies the schema property name. + + Attributes: + document_schema_name (str): + The document schema name. + property_names (MutableSequence[str]): + The property definition names in the schema. + """ + + document_schema_name: str = proto.Field( + proto.STRING, + number=1, + ) + property_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py index 2be966fe86e5..8be54c2eb94b 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py index e729f2f2b934..be312aab82bc 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.iam.v1 import policy_pb2 # type: ignore @@ -102,11 +104,11 @@ class Rule(proto.Message): """ class TriggerType(proto.Enum): - r""" + r"""The trigger types for actions. Values: UNKNOWN (0): - + Trigger for unknown action. ON_CREATE (1): Trigger for create document action. ON_UPDATE (4): @@ -252,7 +254,7 @@ class OperationType(proto.Enum): Values: UNKNOWN (0): - + The unknown operation type. ADD_POLICY_BINDING (1): Adds newly given policy bindings in the existing bindings list. @@ -542,7 +544,7 @@ class State(proto.Enum): Values: UNKNOWN (0): - + The unknown state. ACTION_SUCCEEDED (1): State indicating action executed successfully. diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service.py index 6d4c220f9d11..551bb247743d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.contentwarehouse.v1", diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service_request.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service_request.py index 62546cb1d4a0..af1918e967dd 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service_request.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/ruleset_service_request.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py index d55e46551d39..55d0b84b2dab 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service.py index 6d4c220f9d11..551bb247743d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # - +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.contentwarehouse.v1", diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service_request.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service_request.py index fb4832efcde8..b8e5de907106 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service_request.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset_service_request.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-contentwarehouse/noxfile.py b/packages/google-cloud-contentwarehouse/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-cloud-contentwarehouse/noxfile.py +++ b/packages/google-cloud-contentwarehouse/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_async.py b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_async.py new file mode 100644 index 000000000000..0dfbcf90a01b --- /dev/null +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LockDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contentwarehouse + + +# [START contentwarehouse_v1_generated_DocumentService_LockDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contentwarehouse_v1 + + +async def sample_lock_document(): + # Create a client + client = contentwarehouse_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.LockDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.lock_document(request=request) + + # Handle the response + print(response) + +# [END contentwarehouse_v1_generated_DocumentService_LockDocument_async] diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_sync.py b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_sync.py new file mode 100644 index 000000000000..0ba8d159e644 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LockDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contentwarehouse + + +# [START contentwarehouse_v1_generated_DocumentService_LockDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contentwarehouse_v1 + + +def sample_lock_document(): + # Create a client + client = contentwarehouse_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.LockDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.lock_document(request=request) + + # Handle the response + print(response) + +# [END contentwarehouse_v1_generated_DocumentService_LockDocument_sync] diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json index f85d36c9dc8e..76704841ef98 100644 --- a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contentwarehouse", - "version": "0.3.1" + "version": "0.5.0" }, "snippets": [ { @@ -2118,6 +2118,167 @@ ], "title": "contentwarehouse_v1_generated_document_service_get_document_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.lock_document", + "method": { + "fullName": "google.cloud.contentwarehouse.v1.DocumentService.LockDocument", + "service": { + "fullName": "google.cloud.contentwarehouse.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "LockDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contentwarehouse_v1.types.LockDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.contentwarehouse_v1.types.Document", + "shortName": "lock_document" + }, + "description": "Sample for LockDocument", + "file": "contentwarehouse_v1_generated_document_service_lock_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contentwarehouse_v1_generated_DocumentService_LockDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contentwarehouse_v1_generated_document_service_lock_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.lock_document", + "method": { + "fullName": "google.cloud.contentwarehouse.v1.DocumentService.LockDocument", + "service": 
{ + "fullName": "google.cloud.contentwarehouse.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "LockDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contentwarehouse_v1.types.LockDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "resultType": "google.cloud.contentwarehouse_v1.types.Document", + "shortName": "lock_document" + }, + "description": "Sample for LockDocument", + "file": "contentwarehouse_v1_generated_document_service_lock_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contentwarehouse_v1_generated_DocumentService_LockDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contentwarehouse_v1_generated_document_service_lock_document_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py b/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py index cd92d3ffa23f..b60d9421b887 100644 --- a/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py +++ b/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py @@ -59,7 +59,8 @@ class contentwarehouseCallTransformer(cst.CSTTransformer): 'list_linked_targets': ('parent', 'request_metadata', ), 'list_rule_sets': ('parent', 'page_size', 'page_token', ), 'list_synonym_sets': ('parent', 'page_size', 'page_token', ), - 'search_documents': ('parent', 'request_metadata', 'document_query', 'offset', 'page_size', 'page_token', 'order_by', 'histogram_queries', 'require_total_size', 'qa_size_limit', ), + 'lock_document': ('name', 'collection_id', 'locking_user', ), + 'search_documents': ('parent', 'request_metadata', 'document_query', 'offset', 'page_size', 'page_token', 'order_by', 'histogram_queries', 'require_total_size', 'total_result_size', 'qa_size_limit', ), 'set_acl': ('resource', 'policy', 'request_metadata', 'project_owner', ), 'update_document': ('name', 'document', 'request_metadata', 'cloud_ai_document_option', 'update_options', ), 'update_document_schema': ('name', 'document_schema', ), diff --git a/packages/google-cloud-contentwarehouse/setup.py b/packages/google-cloud-contentwarehouse/setup.py index 27ff3e6b0f80..fee9b28d2bcc 100644 --- a/packages/google-cloud-contentwarehouse/setup.py +++ b/packages/google-cloud-contentwarehouse/setup.py @@ -42,8 +42,8 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", - "google-cloud-documentai >= 1.2.1, < 3.0.0dev", + "google-cloud-documentai >= 2.0.0, <3.0.0dev", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python" @@ -59,9 +59,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if 
"google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, diff --git a/packages/google-cloud-contentwarehouse/testing/constraints-3.10.txt b/packages/google-cloud-contentwarehouse/testing/constraints-3.10.txt index 068438ef4e0a..0397b143465c 100644 --- a/packages/google-cloud-contentwarehouse/testing/constraints-3.10.txt +++ b/packages/google-cloud-contentwarehouse/testing/constraints-3.10.txt @@ -4,5 +4,5 @@ google-api-core proto-plus protobuf -grpc-google-iam-v1 google-cloud-documentai +grpc-google-iam-v1 diff --git a/packages/google-cloud-contentwarehouse/testing/constraints-3.11.txt b/packages/google-cloud-contentwarehouse/testing/constraints-3.11.txt index 068438ef4e0a..0397b143465c 100644 --- a/packages/google-cloud-contentwarehouse/testing/constraints-3.11.txt +++ b/packages/google-cloud-contentwarehouse/testing/constraints-3.11.txt @@ -4,5 +4,5 @@ google-api-core proto-plus protobuf -grpc-google-iam-v1 google-cloud-documentai +grpc-google-iam-v1 diff --git a/packages/google-cloud-contentwarehouse/testing/constraints-3.12.txt b/packages/google-cloud-contentwarehouse/testing/constraints-3.12.txt index 068438ef4e0a..0397b143465c 100644 --- a/packages/google-cloud-contentwarehouse/testing/constraints-3.12.txt +++ b/packages/google-cloud-contentwarehouse/testing/constraints-3.12.txt @@ -4,5 +4,5 @@ google-api-core proto-plus protobuf -grpc-google-iam-v1 google-cloud-documentai +grpc-google-iam-v1 diff --git a/packages/google-cloud-contentwarehouse/testing/constraints-3.7.txt b/packages/google-cloud-contentwarehouse/testing/constraints-3.7.txt index 9f001120ceba..45230603ec98 100644 --- a/packages/google-cloud-contentwarehouse/testing/constraints-3.7.txt +++ b/packages/google-cloud-contentwarehouse/testing/constraints-3.7.txt @@ -7,5 +7,5 @@ google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 +google-cloud-documentai==2.0.0 grpc-google-iam-v1==0.12.4 -google-cloud-documentai==1.2.1 diff --git a/packages/google-cloud-contentwarehouse/testing/constraints-3.8.txt b/packages/google-cloud-contentwarehouse/testing/constraints-3.8.txt index 068438ef4e0a..0397b143465c 100644 --- a/packages/google-cloud-contentwarehouse/testing/constraints-3.8.txt +++ b/packages/google-cloud-contentwarehouse/testing/constraints-3.8.txt @@ -4,5 +4,5 @@ google-api-core proto-plus protobuf -grpc-google-iam-v1 google-cloud-documentai +grpc-google-iam-v1 diff --git a/packages/google-cloud-contentwarehouse/testing/constraints-3.9.txt b/packages/google-cloud-contentwarehouse/testing/constraints-3.9.txt index 068438ef4e0a..0397b143465c 100644 --- a/packages/google-cloud-contentwarehouse/testing/constraints-3.9.txt +++ b/packages/google-cloud-contentwarehouse/testing/constraints-3.9.txt @@ -4,5 +4,5 @@ google-api-core proto-plus protobuf -grpc-google-iam-v1 google-cloud-documentai +grpc-google-iam-v1 diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py index 20636360f5c0..9f1df55c60e0 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import 
gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -32,12 +34,15 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.contentwarehouse_v1.services.document_link_service import ( DocumentLinkServiceAsyncClient, @@ -102,6 +107,7 @@ def test__get_default_mtls_endpoint(): [ (DocumentLinkServiceClient, "grpc"), (DocumentLinkServiceAsyncClient, "grpc_asyncio"), + (DocumentLinkServiceClient, "rest"), ], ) def test_document_link_service_client_from_service_account_info( @@ -117,7 +123,11 @@ def test_document_link_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -125,6 +135,7 @@ def test_document_link_service_client_from_service_account_info( [ (transports.DocumentLinkServiceGrpcTransport, "grpc"), (transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DocumentLinkServiceRestTransport, "rest"), ], ) def test_document_link_service_client_service_account_always_use_jwt( @@ -150,6 +161,7 @@ def test_document_link_service_client_service_account_always_use_jwt( [ (DocumentLinkServiceClient, "grpc"), (DocumentLinkServiceAsyncClient, "grpc_asyncio"), + (DocumentLinkServiceClient, "rest"), ], ) def test_document_link_service_client_from_service_account_file( @@ -172,13 +184,18 @@ def test_document_link_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) def test_document_link_service_client_get_transport_class(): transport = DocumentLinkServiceClient.get_transport_class() available_transports = [ transports.DocumentLinkServiceGrpcTransport, + transports.DocumentLinkServiceRestTransport, ] assert transport in available_transports @@ -199,6 +216,11 @@ def test_document_link_service_client_get_transport_class(): transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DocumentLinkServiceClient, + transports.DocumentLinkServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( @@ -354,6 +376,18 @@ def test_document_link_service_client_client_options( "grpc_asyncio", "false", ), + ( + DocumentLinkServiceClient, + transports.DocumentLinkServiceRestTransport, + "rest", + "true", + ), + ( + DocumentLinkServiceClient, + transports.DocumentLinkServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -557,6 +591,11 @@ def test_document_link_service_client_get_mtls_endpoint_and_cert_source(client_c transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + 
DocumentLinkServiceClient, + transports.DocumentLinkServiceRestTransport, + "rest", + ), ], ) def test_document_link_service_client_client_options_scopes( @@ -597,6 +636,12 @@ def test_document_link_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + DocumentLinkServiceClient, + transports.DocumentLinkServiceRestTransport, + "rest", + None, + ), ], ) def test_document_link_service_client_client_options_credentials_file( @@ -1900,162 +1945,1331 @@ async def test_delete_document_link_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DocumentLinkServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + document_link_service.ListLinkedTargetsRequest, + dict, + ], +) +def test_list_linked_targets_rest(request_type): + client = DocumentLinkServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DocumentLinkServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_link_service.ListLinkedTargetsResponse( + next_page_token="next_page_token_value", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DocumentLinkServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DocumentLinkServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_link_service.ListLinkedTargetsResponse.pb( + return_value ) + json_return_value = json_format.MessageToJson(pb_return_value) - # It is an error to provide an api_key and a transport instance. - transport = transports.DocumentLinkServiceGrpcTransport( + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_linked_targets(request) + + assert response.raw_page is response + + # Establish that the response is the type that we expect. 
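+    # ListLinkedTargets is not paginated, so the response object serves as its own raw_page.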
+ assert isinstance(response, document_link_service.ListLinkedTargetsResponse) + assert response.next_page_token == "next_page_token_value" + + +def test_list_linked_targets_rest_required_fields( + request_type=document_link_service.ListLinkedTargetsRequest, +): + transport_class = transports.DocumentLinkServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_linked_targets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_linked_targets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentLinkServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentLinkServiceClient( - client_options=options, - transport=transport, - ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_link_service.ListLinkedTargetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_link_service.ListLinkedTargetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentLinkServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # It is an error to provide scopes and a transport instance. 
- transport = transports.DocumentLinkServiceGrpcTransport( + response = client.list_linked_targets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_linked_targets_rest_unset_required_fields(): + transport = transports.DocumentLinkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_linked_targets._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_linked_targets_rest_interceptors(null_interceptor): + transport = transports.DocumentLinkServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentLinkServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = DocumentLinkServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = DocumentLinkServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, "post_list_linked_targets" + ) as post, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, "pre_list_linked_targets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_link_service.ListLinkedTargetsRequest.pb( + document_link_service.ListLinkedTargetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_link_service.ListLinkedTargetsResponse.to_json( + document_link_service.ListLinkedTargetsResponse() + ) ) + request = document_link_service.ListLinkedTargetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_link_service.ListLinkedTargetsResponse() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DocumentLinkServiceGrpcTransport( + client.list_linked_targets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_linked_targets_rest_bad_request( + transport: str = "rest", request_type=document_link_service.ListLinkedTargetsRequest +): + client = DocumentLinkServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = DocumentLinkServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DocumentLinkServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
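+    # The mocked 400 status must surface to the caller as core_exceptions.BadRequest.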
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_linked_targets(request) + + +def test_list_linked_targets_rest_flattened(): + client = DocumentLinkServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.DocumentLinkServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_link_service.ListLinkedTargetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_link_service.ListLinkedTargetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_linked_targets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/documents/*}/linkedTargets" + % client.transport._host, + args[1], + ) + + +def test_list_linked_targets_rest_flattened_error(transport: str = "rest"): + client = DocumentLinkServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_linked_targets( + document_link_service.ListLinkedTargetsRequest(), + parent="parent_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DocumentLinkServiceGrpcTransport, - transports.DocumentLinkServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_list_linked_targets_rest_error(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + document_link_service.ListLinkedSourcesRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DocumentLinkServiceClient.get_transport_class(transport_name)( +def test_list_linked_sources_rest(request_type): + client = DocumentLinkServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_link_service.ListLinkedSourcesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_link_service.ListLinkedSourcesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_linked_sources(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLinkedSourcesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_linked_sources_rest_required_fields( + request_type=document_link_service.ListLinkedSourcesRequest, +): + transport_class = transports.DocumentLinkServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_linked_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_linked_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = DocumentLinkServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.DocumentLinkServiceGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
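+    # An empty response message suffices here; only required-field handling is under test.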
+ return_value = document_link_service.ListLinkedSourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_link_service.ListLinkedSourcesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_linked_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_linked_sources_rest_unset_required_fields(): + transport = transports.DocumentLinkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.list_linked_sources._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_document_link_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DocumentLinkServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_linked_sources_rest_interceptors(null_interceptor): + transport = transports.DocumentLinkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentLinkServiceRestInterceptor(), + ) + client = DocumentLinkServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, "post_list_linked_sources" + ) as post, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, "pre_list_linked_sources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_link_service.ListLinkedSourcesRequest.pb( + document_link_service.ListLinkedSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_link_service.ListLinkedSourcesResponse.to_json( + document_link_service.ListLinkedSourcesResponse() + ) ) + request = document_link_service.ListLinkedSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_link_service.ListLinkedSourcesResponse() -def 
test_document_link_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.contentwarehouse_v1.services.document_link_service.transports.DocumentLinkServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DocumentLinkServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.list_linked_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_linked_targets", - "list_linked_sources", - "create_document_link", - "delete_document_link", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + pre.assert_called_once() + post.assert_called_once() - with pytest.raises(NotImplementedError): - transport.close() - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() +def test_list_linked_sources_rest_bad_request( + transport: str = "rest", request_type=document_link_service.ListLinkedSourcesRequest +): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_linked_sources(request) + + +def test_list_linked_sources_rest_flattened(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_link_service.ListLinkedSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_link_service.ListLinkedSourcesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_linked_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
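+        # Exactly one HTTP request should have been issued, with a URI matching the http rule.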
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/documents/*}/linkedSources" + % client.transport._host, + args[1], + ) + + +def test_list_linked_sources_rest_flattened_error(transport: str = "rest"): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_linked_sources( + document_link_service.ListLinkedSourcesRequest(), + parent="parent_value", + ) + + +def test_list_linked_sources_rest_pager(transport: str = "rest"): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_link_service.ListLinkedSourcesResponse( + document_links=[ + document_link_service.DocumentLink(), + document_link_service.DocumentLink(), + document_link_service.DocumentLink(), + ], + next_page_token="abc", + ), + document_link_service.ListLinkedSourcesResponse( + document_links=[], + next_page_token="def", + ), + document_link_service.ListLinkedSourcesResponse( + document_links=[ + document_link_service.DocumentLink(), + ], + next_page_token="ghi", + ), + document_link_service.ListLinkedSourcesResponse( + document_links=[ + document_link_service.DocumentLink(), + document_link_service.DocumentLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_link_service.ListLinkedSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/documents/sample3" + } + + pager = client.list_linked_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document_link_service.DocumentLink) for i in results) + + pages = list(client.list_linked_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_link_service.CreateDocumentLinkRequest, + dict, + ], +) +def test_create_document_link_rest(request_type): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
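+        # Populate a few scalar fields so the round-trip assertions below have values to verify.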
+ return_value = document_link_service.DocumentLink( + name="name_value", + description="description_value", + state=document_link_service.DocumentLink.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_link_service.DocumentLink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_document_link(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document_link_service.DocumentLink) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == document_link_service.DocumentLink.State.ACTIVE + + +def test_create_document_link_rest_required_fields( + request_type=document_link_service.CreateDocumentLinkRequest, +): + transport_class = transports.DocumentLinkServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_link_service.DocumentLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
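+            # transcode is patched above, so this synthetic result stands in for real http_options matching.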
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_link_service.DocumentLink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_document_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_document_link_rest_unset_required_fields(): + transport = transports.DocumentLinkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_document_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "documentLink", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_document_link_rest_interceptors(null_interceptor): + transport = transports.DocumentLinkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentLinkServiceRestInterceptor(), + ) + client = DocumentLinkServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, "post_create_document_link" + ) as post, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, "pre_create_document_link" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_link_service.CreateDocumentLinkRequest.pb( + document_link_service.CreateDocumentLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_link_service.DocumentLink.to_json( + document_link_service.DocumentLink() + ) + + request = document_link_service.CreateDocumentLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_link_service.DocumentLink() + + client.create_document_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_document_link_rest_bad_request( + transport: str = "rest", + request_type=document_link_service.CreateDocumentLinkRequest, +): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_document_link(request) + + +def test_create_document_link_rest_flattened(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_link_service.DocumentLink() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + document_link=document_link_service.DocumentLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_link_service.DocumentLink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_document_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/documents/*}/documentLinks" + % client.transport._host, + args[1], + ) + + +def test_create_document_link_rest_flattened_error(transport: str = "rest"): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document_link( + document_link_service.CreateDocumentLinkRequest(), + parent="parent_value", + document_link=document_link_service.DocumentLink(name="name_value"), + ) + + +def test_create_document_link_rest_error(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_link_service.DeleteDocumentLinkRequest, + dict, + ], +) +def test_delete_document_link_rest(request_type): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documents/sample3/documentLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
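+        # DeleteDocumentLink returns Empty, so the faked body is an empty JSON string and the client returns None.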
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_document_link(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_document_link_rest_required_fields( + request_type=document_link_service.DeleteDocumentLinkRequest, +): + transport_class = transports.DocumentLinkServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_document_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_document_link_rest_unset_required_fields(): + transport = transports.DocumentLinkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_document_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_link_rest_interceptors(null_interceptor): + transport = transports.DocumentLinkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentLinkServiceRestInterceptor(), + ) + client = DocumentLinkServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, "pre_delete_document_link" + ) as pre: + pre.assert_not_called() + pb_message = document_link_service.DeleteDocumentLinkRequest.pb( + document_link_service.DeleteDocumentLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = document_link_service.DeleteDocumentLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_document_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_document_link_rest_bad_request( + transport: str = "rest", + request_type=document_link_service.DeleteDocumentLinkRequest, +): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documents/sample3/documentLinks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_document_link(request) + + +def test_delete_document_link_rest_flattened(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
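+        # No payload is expected back; the assertions below only check the URI built from the flattened args.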
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documents/sample3/documentLinks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_document_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documents/*/documentLinks/*}:delete" + % client.transport._host, + args[1], + ) + + +def test_delete_document_link_rest_flattened_error(transport: str = "rest"): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document_link( + document_link_service.DeleteDocumentLinkRequest(), + name="name_value", + ) + + +def test_delete_document_link_rest_error(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DocumentLinkServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DocumentLinkServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentLinkServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DocumentLinkServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentLinkServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentLinkServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DocumentLinkServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentLinkServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
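+    # Anonymous credentials keep the test hermetic; no real credential lookup occurs.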
+ transport = transports.DocumentLinkServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DocumentLinkServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DocumentLinkServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DocumentLinkServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DocumentLinkServiceGrpcTransport, + transports.DocumentLinkServiceGrpcAsyncIOTransport, + transports.DocumentLinkServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DocumentLinkServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DocumentLinkServiceGrpcTransport, + ) + + +def test_document_link_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DocumentLinkServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_document_link_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.contentwarehouse_v1.services.document_link_service.transports.DocumentLinkServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DocumentLinkServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
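+    # get_operation is included now that the operations mixin is part of the base transport surface.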
+ methods = ( + "list_linked_targets", + "list_linked_sources", + "create_document_link", + "delete_document_link", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() def test_document_link_service_base_transport_with_credentials_file(): @@ -2127,6 +3341,7 @@ def test_document_link_service_transport_auth_adc(transport_class): [ transports.DocumentLinkServiceGrpcTransport, transports.DocumentLinkServiceGrpcAsyncIOTransport, + transports.DocumentLinkServiceRestTransport, ], ) def test_document_link_service_transport_auth_gdch_credentials(transport_class): @@ -2226,11 +3441,23 @@ def test_document_link_service_grpc_transport_client_cert_source_for_mtls( ) +def test_document_link_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DocumentLinkServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_link_service_host_no_port(transport_name): @@ -2241,7 +3468,11 @@ def test_document_link_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -2249,6 +3480,7 @@ def test_document_link_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_link_service_host_with_port(transport_name): @@ -2259,7 +3491,42 @@ def test_document_link_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:8000") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_document_link_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DocumentLinkServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DocumentLinkServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_linked_targets._session + session2 = client2.transport.list_linked_targets._session + assert session1 != session2 + session1 = client1.transport.list_linked_sources._session + session2 = client2.transport.list_linked_sources._session + assert session1 != session2 + session1 = client1.transport.create_document_link._session + session2 = client2.transport.create_document_link._session + assert session1 != session2 + session1 = client1.transport.delete_document_link._session + session2 = 
client2.transport.delete_document_link._session + assert session1 != session2 def test_document_link_service_grpc_transport_channel(): @@ -2585,8 +3852,212 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DocumentLinkServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
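+ # The async client awaits the stub call, so the fake below must be awaitable.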
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DocumentLinkServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DocumentLinkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DocumentLinkServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
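+ # FakeUnaryUnaryCall makes the canned message awaitable like a real RPC.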
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2604,6 +4075,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py index a0822d522f5f..e024f7db8f2c 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -32,12 +34,15 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.contentwarehouse_v1.services.document_schema_service import ( DocumentSchemaServiceAsyncClient, @@ -102,6 +107,7 @@ def test__get_default_mtls_endpoint(): [ (DocumentSchemaServiceClient, "grpc"), (DocumentSchemaServiceAsyncClient, "grpc_asyncio"), + (DocumentSchemaServiceClient, "rest"), ], ) def test_document_schema_service_client_from_service_account_info( @@ -117,7 +123,11 @@ def test_document_schema_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -125,6 +135,7 @@ def test_document_schema_service_client_from_service_account_info( [ (transports.DocumentSchemaServiceGrpcTransport, "grpc"), (transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DocumentSchemaServiceRestTransport, "rest"), ], ) def test_document_schema_service_client_service_account_always_use_jwt( @@ -150,6 +161,7 @@ def test_document_schema_service_client_service_account_always_use_jwt( [ (DocumentSchemaServiceClient, "grpc"), (DocumentSchemaServiceAsyncClient, "grpc_asyncio"), + (DocumentSchemaServiceClient, "rest"), ], ) def test_document_schema_service_client_from_service_account_file( @@ -172,13 +184,18 @@ def test_document_schema_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://contentwarehouse.googleapis.com" + ) def test_document_schema_service_client_get_transport_class(): transport = DocumentSchemaServiceClient.get_transport_class() available_transports = [ transports.DocumentSchemaServiceGrpcTransport, + transports.DocumentSchemaServiceRestTransport, ] assert transport in available_transports @@ -199,6 +216,11 @@ def test_document_schema_service_client_get_transport_class(): transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DocumentSchemaServiceClient, + transports.DocumentSchemaServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( @@ -354,6 +376,18 @@ def test_document_schema_service_client_client_options( "grpc_asyncio", "false", ), + ( + DocumentSchemaServiceClient, + transports.DocumentSchemaServiceRestTransport, + "rest", + "true", + ), + ( + DocumentSchemaServiceClient, + transports.DocumentSchemaServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -557,6 +591,11 @@ def test_document_schema_service_client_get_mtls_endpoint_and_cert_source(client transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DocumentSchemaServiceClient, + transports.DocumentSchemaServiceRestTransport, + "rest", + ), ], ) def test_document_schema_service_client_client_options_scopes( @@ -597,6 +636,12 @@ def test_document_schema_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + DocumentSchemaServiceClient, + transports.DocumentSchemaServiceRestTransport, + "rest", + None, + ), ], ) def test_document_schema_service_client_client_options_credentials_file( @@ -2184,160 +2229,1706 @@ async def test_list_document_schemas_async_pages(): assert page_.raw_page.next_page_token == token -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DocumentSchemaServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + document_schema_service.CreateDocumentSchemaRequest, + dict, + ], +) +def test_create_document_schema_rest(request_type): + client = DocumentSchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DocumentSchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["document_schema"] = { + "name": "name_value", + "display_name": "display_name_value", + "property_definitions": [ + { + "name": "name_value", + "display_name": "display_name_value", + "is_repeatable": True, + "is_filterable": True, + "is_searchable": True, + "is_metadata": True, + "is_required": True, + "retrieval_importance": 1, + "integer_type_options": {}, + "float_type_options": {}, + "text_type_options": {}, + "property_type_options": {"property_definitions": {}}, + "enum_type_options": { + "possible_values": [ + "possible_values_value1", + "possible_values_value2", + ], + "validation_check_disabled": True, + }, + "date_time_type_options": {}, + "map_type_options": {}, + "timestamp_type_options": {}, + "schema_sources": [ + {"name": "name_value", "processor_type": "processor_type_value"} + ], + } + ], + "document_is_folder": True, + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + "description": "description_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcc_document_schema.DocumentSchema( + name="name_value", + display_name="display_name_value", + document_is_folder=True, + description="description_value", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DocumentSchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DocumentSchemaServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_document_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcc_document_schema.DocumentSchema) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.document_is_folder is True + assert response.description == "description_value" + + +def test_create_document_schema_rest_required_fields( + request_type=document_schema_service.CreateDocumentSchemaRequest, +): + transport_class = transports.DocumentSchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.DocumentSchemaServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentSchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcc_document_schema.DocumentSchema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
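+ # The canned transcode result below stands in for a real http rule match.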
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcc_document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_document_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_document_schema_rest_unset_required_fields(): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentSchemaServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentSchemaServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + unset_fields = transport.create_document_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "documentSchema", + ) ) + ) - # It is an error to provide scopes and a transport instance. - transport = transports.DocumentSchemaServiceGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_document_schema_rest_interceptors(null_interceptor): + transport = transports.DocumentSchemaServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentSchemaServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = DocumentSchemaServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = DocumentSchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "post_create_document_schema" + ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "pre_create_document_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_schema_service.CreateDocumentSchemaRequest.pb( + document_schema_service.CreateDocumentSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcc_document_schema.DocumentSchema.to_json( + gcc_document_schema.DocumentSchema() ) + request = document_schema_service.CreateDocumentSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcc_document_schema.DocumentSchema() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DocumentSchemaServiceGrpcTransport( + client.create_document_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_document_schema_rest_bad_request( + transport: str = "rest", + request_type=document_schema_service.CreateDocumentSchemaRequest, +): + client = DocumentSchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = DocumentSchemaServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["document_schema"] = { + "name": "name_value", + "display_name": "display_name_value", + "property_definitions": [ + { + "name": "name_value", + "display_name": "display_name_value", + "is_repeatable": True, + "is_filterable": True, + "is_searchable": True, + "is_metadata": True, + "is_required": True, + "retrieval_importance": 1, + "integer_type_options": {}, + "float_type_options": {}, + "text_type_options": {}, + "property_type_options": {"property_definitions": {}}, + "enum_type_options": { + "possible_values": [ + "possible_values_value1", + "possible_values_value2", + ], + "validation_check_disabled": True, + }, + "date_time_type_options": {}, + "map_type_options": {}, + "timestamp_type_options": {}, + "schema_sources": [ + {"name": "name_value", "processor_type": "processor_type_value"} + ], + } + ], + "document_is_folder": True, + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + "description": "description_value", + } + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DocumentSchemaServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_document_schema(request) + + +def test_create_document_schema_rest_flattened(): + client = DocumentSchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.DocumentSchemaServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
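+ # An empty message suffices; this test only verifies the request URI.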
+ return_value = gcc_document_schema.DocumentSchema() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + document_schema=gcc_document_schema.DocumentSchema(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_document_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/documentSchemas" + % client.transport._host, + args[1], + ) + + +def test_create_document_schema_rest_flattened_error(transport: str = "rest"): + client = DocumentSchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document_schema( + document_schema_service.CreateDocumentSchemaRequest(), + parent="parent_value", + document_schema=gcc_document_schema.DocumentSchema(name="name_value"), + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DocumentSchemaServiceGrpcTransport, - transports.DocumentSchemaServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_create_document_schema_rest_error(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + document_schema_service.UpdateDocumentSchemaRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DocumentSchemaServiceClient.get_transport_class(transport_name)( +def test_update_document_schema_rest(request_type): + client = DocumentSchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
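+ # Populate the fields that the response assertions below check.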
+ return_value = gcc_document_schema.DocumentSchema( + name="name_value", + display_name="display_name_value", + document_is_folder=True, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_document_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcc_document_schema.DocumentSchema) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.document_is_folder is True + assert response.description == "description_value" + + +def test_update_document_schema_rest_required_fields( + request_type=document_schema_service.UpdateDocumentSchemaRequest, +): + transport_class = transports.DocumentSchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = DocumentSchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.DocumentSchemaServiceGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcc_document_schema.DocumentSchema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcc_document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_document_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_document_schema_rest_unset_required_fields(): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.update_document_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "documentSchema", + ) + ) + ) -def test_document_schema_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DocumentSchemaServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_document_schema_rest_interceptors(null_interceptor): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentSchemaServiceRestInterceptor(), + ) + client = DocumentSchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "post_update_document_schema" + ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "pre_update_document_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_schema_service.UpdateDocumentSchemaRequest.pb( + document_schema_service.UpdateDocumentSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcc_document_schema.DocumentSchema.to_json( + gcc_document_schema.DocumentSchema() ) + request = document_schema_service.UpdateDocumentSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcc_document_schema.DocumentSchema() -def test_document_schema_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.contentwarehouse_v1.services.document_schema_service.transports.DocumentSchemaServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DocumentSchemaServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.update_document_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_document_schema", - "update_document_schema", - "get_document_schema", - "delete_document_schema", - "list_document_schemas", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + pre.assert_called_once() + post.assert_called_once() - with pytest.raises(NotImplementedError): - transport.close() - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] +def test_update_document_schema_rest_bad_request( + transport: str = "rest", + request_type=document_schema_service.UpdateDocumentSchemaRequest, +): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_document_schema(request) + + +def test_update_document_schema_rest_flattened(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcc_document_schema.DocumentSchema() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + document_schema=gcc_document_schema.DocumentSchema(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_document_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
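+ # path_template.validate matches the sent URI against the http rule pattern.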
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documentSchemas/*}" + % client.transport._host, + args[1], + ) + + +def test_update_document_schema_rest_flattened_error(transport: str = "rest"): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_document_schema( + document_schema_service.UpdateDocumentSchemaRequest(), + name="name_value", + document_schema=gcc_document_schema.DocumentSchema(name="name_value"), + ) + + +def test_update_document_schema_rest_error(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_schema_service.GetDocumentSchemaRequest, + dict, + ], +) +def test_get_document_schema_rest(request_type): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_schema.DocumentSchema( + name="name_value", + display_name="display_name_value", + document_is_folder=True, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document_schema(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document_schema.DocumentSchema) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.document_is_folder is True + assert response.description == "description_value" + + +def test_get_document_schema_rest_required_fields( + request_type=document_schema_service.GetDocumentSchemaRequest, +): + transport_class = transports.DocumentSchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_schema.DocumentSchema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_schema_rest_unset_required_fields(): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_schema_rest_interceptors(null_interceptor): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentSchemaServiceRestInterceptor(), + ) + client = DocumentSchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "post_get_document_schema" + ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "pre_get_document_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_schema_service.GetDocumentSchemaRequest.pb( + document_schema_service.GetDocumentSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_schema.DocumentSchema.to_json( + document_schema.DocumentSchema() + ) + + request = document_schema_service.GetDocumentSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_schema.DocumentSchema() + + client.get_document_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_schema_rest_bad_request( + transport: str = "rest", + request_type=document_schema_service.GetDocumentSchemaRequest, +): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
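+ # A 400 status from the mocked session should raise core_exceptions.BadRequest.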
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document_schema(request) + + +def test_get_document_schema_rest_flattened(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_schema.DocumentSchema() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_schema.DocumentSchema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documentSchemas/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_schema_rest_flattened_error(transport: str = "rest"): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document_schema( + document_schema_service.GetDocumentSchemaRequest(), + name="name_value", + ) + + +def test_get_document_schema_rest_error(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_schema_service.DeleteDocumentSchemaRequest, + dict, + ], +) +def test_delete_document_schema_rest(request_type): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_document_schema(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_document_schema_rest_required_fields( + request_type=document_schema_service.DeleteDocumentSchemaRequest, +): + transport_class = transports.DocumentSchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_document_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_document_schema_rest_unset_required_fields(): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_document_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_schema_rest_interceptors(null_interceptor): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentSchemaServiceRestInterceptor(), + ) + client = DocumentSchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "pre_delete_document_schema" + ) as pre: + pre.assert_not_called() + pb_message = document_schema_service.DeleteDocumentSchemaRequest.pb( + document_schema_service.DeleteDocumentSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = document_schema_service.DeleteDocumentSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_document_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_document_schema_rest_bad_request( + transport: str = "rest", + request_type=document_schema_service.DeleteDocumentSchemaRequest, +): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_document_schema(request) + + +def test_delete_document_schema_rest_flattened(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
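+ # No payload is needed; the flattened test only inspects the request URI.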
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documentSchemas/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_document_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documentSchemas/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_document_schema_rest_flattened_error(transport: str = "rest"): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document_schema( + document_schema_service.DeleteDocumentSchemaRequest(), + name="name_value", + ) + + +def test_delete_document_schema_rest_error(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_schema_service.ListDocumentSchemasRequest, + dict, + ], +) +def test_list_document_schemas_rest(request_type): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_schema_service.ListDocumentSchemasResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_schema_service.ListDocumentSchemasResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_document_schemas(request) + + # Establish that the response is the type that we expect. 
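+ # The raw ListDocumentSchemasResponse is wrapped in a pager for iteration.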
+ assert isinstance(response, pagers.ListDocumentSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_document_schemas_rest_required_fields( + request_type=document_schema_service.ListDocumentSchemasRequest, +): + transport_class = transports.DocumentSchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_document_schemas._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_document_schemas._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_schema_service.ListDocumentSchemasResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
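+            # Illustrative only: a real transcode() result for this method would
+            # look more like {"uri": "v1/projects/.../documentSchemas",
+            # "method": "get", "query_params": {...}}; the stub below substitutes
+            # a bare URI so the required fields land in query_params.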
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_schema_service.ListDocumentSchemasResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_document_schemas(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_document_schemas_rest_unset_required_fields(): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_document_schemas._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_document_schemas_rest_interceptors(null_interceptor): + transport = transports.DocumentSchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentSchemaServiceRestInterceptor(), + ) + client = DocumentSchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "post_list_document_schemas" + ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, "pre_list_document_schemas" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_schema_service.ListDocumentSchemasRequest.pb( + document_schema_service.ListDocumentSchemasRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_schema_service.ListDocumentSchemasResponse.to_json( + document_schema_service.ListDocumentSchemasResponse() + ) + ) + + request = document_schema_service.ListDocumentSchemasRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_schema_service.ListDocumentSchemasResponse() + + client.list_document_schemas( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_document_schemas_rest_bad_request( + transport: str = "rest", + request_type=document_schema_service.ListDocumentSchemasRequest, +): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
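+    # The REST transport maps HTTP status codes onto google.api_core
+    # exceptions, so the mocked 400 below surfaces as core_exceptions.BadRequest.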
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_document_schemas(request) + + +def test_list_document_schemas_rest_flattened(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_schema_service.ListDocumentSchemasResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_schema_service.ListDocumentSchemasResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_document_schemas(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/documentSchemas" + % client.transport._host, + args[1], + ) + + +def test_list_document_schemas_rest_flattened_error(transport: str = "rest"): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_document_schemas( + document_schema_service.ListDocumentSchemasRequest(), + parent="parent_value", + ) + + +def test_list_document_schemas_rest_pager(transport: str = "rest"): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
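+        # The fake pages below are chained via next_page_token
+        # ("abc" -> "def" -> "ghi" -> ""); an empty token marks the last page.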
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_schema_service.ListDocumentSchemasResponse( + document_schemas=[ + document_schema.DocumentSchema(), + document_schema.DocumentSchema(), + document_schema.DocumentSchema(), + ], + next_page_token="abc", + ), + document_schema_service.ListDocumentSchemasResponse( + document_schemas=[], + next_page_token="def", + ), + document_schema_service.ListDocumentSchemasResponse( + document_schemas=[ + document_schema.DocumentSchema(), + ], + next_page_token="ghi", + ), + document_schema_service.ListDocumentSchemasResponse( + document_schemas=[ + document_schema.DocumentSchema(), + document_schema.DocumentSchema(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_schema_service.ListDocumentSchemasResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_document_schemas(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document_schema.DocumentSchema) for i in results) + + pages = list(client.list_document_schemas(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DocumentSchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DocumentSchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentSchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DocumentSchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentSchemaServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentSchemaServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DocumentSchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentSchemaServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DocumentSchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DocumentSchemaServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DocumentSchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DocumentSchemaServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DocumentSchemaServiceGrpcTransport, + transports.DocumentSchemaServiceGrpcAsyncIOTransport, + transports.DocumentSchemaServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DocumentSchemaServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DocumentSchemaServiceGrpcTransport, + ) + + +def test_document_schema_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DocumentSchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_document_schema_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.contentwarehouse_v1.services.document_schema_service.transports.DocumentSchemaServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DocumentSchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
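+    # The base transport only defines the interface; concrete gRPC and REST
+    # transports override each stub, so calling one on the base must fail.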
+    methods = (
+        "create_document_schema",
+        "update_document_schema",
+        "get_document_schema",
+        "delete_document_schema",
+        "list_document_schemas",
+        "get_operation",
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        "kind",
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
@@ -2412,6 +4003,7 @@ def test_document_schema_service_transport_auth_adc(transport_class):
     [
         transports.DocumentSchemaServiceGrpcTransport,
         transports.DocumentSchemaServiceGrpcAsyncIOTransport,
+        transports.DocumentSchemaServiceRestTransport,
     ],
 )
 def test_document_schema_service_transport_auth_gdch_credentials(transport_class):
@@ -2513,11 +4105,23 @@ def test_document_schema_service_grpc_transport_client_cert_source_for_mtls(
     )
 
 
+def test_document_schema_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.DocumentSchemaServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
 @pytest.mark.parametrize(
     "transport_name",
     [
         "grpc",
         "grpc_asyncio",
+        "rest",
     ],
 )
 def test_document_schema_service_host_no_port(transport_name):
@@ -2528,7 +4132,11 @@ def test_document_schema_service_host_no_port(transport_name):
         ),
         transport=transport_name,
     )
-    assert client.transport._host == ("contentwarehouse.googleapis.com:443")
+    assert client.transport._host == (
+        "contentwarehouse.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://contentwarehouse.googleapis.com"
+    )
 
 
 @pytest.mark.parametrize(
@@ -2536,6 +4144,7 @@ def test_document_schema_service_host_no_port(transport_name):
     [
         "grpc",
         "grpc_asyncio",
+        "rest",
     ],
 )
 def test_document_schema_service_host_with_port(transport_name):
@@ -2546,7 +4155,45 @@ def test_document_schema_service_host_with_port(transport_name):
         ),
         transport=transport_name,
     )
-    assert client.transport._host == ("contentwarehouse.googleapis.com:8000")
+    assert client.transport._host == (
+        "contentwarehouse.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://contentwarehouse.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_document_schema_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DocumentSchemaServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DocumentSchemaServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_document_schema._session
+    session2 = client2.transport.create_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.update_document_schema._session
+    session2 = client2.transport.update_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.get_document_schema._session
+    session2 = client2.transport.get_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.delete_document_schema._session
+    session2 = client2.transport.delete_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.list_document_schemas._session
+    session2 = client2.transport.list_document_schemas._session
+    assert session1 != session2
 
 
 def test_document_schema_service_grpc_transport_channel():
@@ -2866,8 +4513,212 @@ async def test_transport_close_async():
     close.assert_called_once()
 
 
+def test_get_operation_rest_bad_request(
+    transport: str = "rest", request_type=operations_pb2.GetOperationRequest
+):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"name": "projects/sample1/locations/sample2/operations/sample3"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_operation(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        operations_pb2.GetOperationRequest,
+        dict,
+    ],
+)
+def test_get_operation_rest(request_type):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation()
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.get_operation(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+
+def test_get_operation(transport: str = "grpc"):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DocumentSchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DocumentSchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DocumentSchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
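+        # The request here is a plain dict; the client coerces it into a
+        # GetOperationRequest before invoking the transport.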
+        call.return_value = operations_pb2.Operation()
+
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
 def test_transport_close():
     transports = {
+        "rest": "_session",
         "grpc": "_grpc_channel",
     }
@@ -2885,6 +4736,7 @@ def test_transport_close():
 
 def test_client_ctx():
     transports = [
+        "rest",
         "grpc",
     ]
     for transport in transports:
diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py
index a13633429cf0..cfd615bff861 100644
--- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py
+++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py
@@ -22,6 +22,8 @@
 except ImportError:  # pragma: NO COVER
     import mock
 
+from collections.abc import Iterable
+import json
 import math
 
 from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template
@@ -30,12 +32,16 @@ import google.auth
 from google.auth import credentials as ga_credentials
 from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.documentai_v1.types import barcode
+from google.cloud.documentai_v1.types import document as gcd_document
+from google.cloud.documentai_v1.types import geometry
 from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2
+from google.longrunning import operations_pb2  # type: ignore
 from google.oauth2 import service_account
 from google.protobuf import any_pb2  # type: ignore
 from google.protobuf import duration_pb2  # type: ignore
 from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import json_format
 from google.protobuf import timestamp_pb2  # type: ignore
 from google.protobuf import wrappers_pb2  # type: ignore
 from google.rpc import status_pb2  # type: ignore
@@ -51,6 +57,8 @@
 from proto.marshal.rules import wrappers
 from proto.marshal.rules.dates import DurationRule, TimestampRule
 import pytest
+from requests import PreparedRequest, Request, Response
+from requests.sessions import Session
 
 from google.cloud.contentwarehouse_v1.services.document_service import (
     DocumentServiceAsyncClient,
@@ -118,6 +126,7 @@ def test__get_default_mtls_endpoint():
     [
         (DocumentServiceClient, "grpc"),
         (DocumentServiceAsyncClient, "grpc_asyncio"),
+        (DocumentServiceClient, "rest"),
     ],
 )
 def test_document_service_client_from_service_account_info(
@@ -133,7 +142,11 @@ def test_document_service_client_from_service_account_info(
     assert client.transport._credentials == creds
     assert isinstance(client, client_class)
 
-    assert client.transport._host == ("contentwarehouse.googleapis.com:443")
+    assert client.transport._host == (
+        "contentwarehouse.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://contentwarehouse.googleapis.com"
+ ) @pytest.mark.parametrize( @@ -141,6 +154,7 @@ def test_document_service_client_from_service_account_info( [ (transports.DocumentServiceGrpcTransport, "grpc"), (transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DocumentServiceRestTransport, "rest"), ], ) def test_document_service_client_service_account_always_use_jwt( @@ -166,6 +180,7 @@ def test_document_service_client_service_account_always_use_jwt( [ (DocumentServiceClient, "grpc"), (DocumentServiceAsyncClient, "grpc_asyncio"), + (DocumentServiceClient, "rest"), ], ) def test_document_service_client_from_service_account_file( @@ -188,13 +203,18 @@ def test_document_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) def test_document_service_client_get_transport_class(): transport = DocumentServiceClient.get_transport_class() available_transports = [ transports.DocumentServiceGrpcTransport, + transports.DocumentServiceRestTransport, ] assert transport in available_transports @@ -211,6 +231,7 @@ def test_document_service_client_get_transport_class(): transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -366,6 +387,18 @@ def test_document_service_client_client_options( "grpc_asyncio", "false", ), + ( + DocumentServiceClient, + transports.DocumentServiceRestTransport, + "rest", + "true", + ), + ( + DocumentServiceClient, + transports.DocumentServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -565,6 +598,7 @@ def test_document_service_client_get_mtls_endpoint_and_cert_source(client_class) transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"), ], ) def test_document_service_client_client_options_scopes( @@ -605,6 +639,7 @@ def test_document_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest", None), ], ) def test_document_service_client_client_options_credentials_file( @@ -984,7 +1019,9 @@ def test_get_document(request_type, transport: str = "grpc"): structured_content_uri="structured_content_uri_value", raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF, async_enabled=True, + content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE, text_extraction_disabled=True, + text_extraction_enabled=True, creator="creator_value", updater="updater_value", plain_text="plain_text_value", @@ -1011,7 +1048,11 @@ def test_get_document(request_type, transport: str = "grpc"): == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF ) assert response.async_enabled is True + assert ( + response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE + ) assert response.text_extraction_disabled is True + assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" @@ -1060,7 +1101,9 @@ async def test_get_document_async( structured_content_uri="structured_content_uri_value", 
raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF, async_enabled=True, + content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE, text_extraction_disabled=True, + text_extraction_enabled=True, creator="creator_value", updater="updater_value", ) @@ -1086,7 +1129,11 @@ async def test_get_document_async( == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF ) assert response.async_enabled is True + assert ( + response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE + ) assert response.text_extraction_disabled is True + assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" @@ -2131,11 +2178,11 @@ async def test_search_documents_async_pages(): @pytest.mark.parametrize( "request_type", [ - document_service_request.FetchAclRequest, + document_service_request.LockDocumentRequest, dict, ], ) -def test_fetch_acl(request_type, transport: str = "grpc"): +def test_lock_document(request_type, transport: str = "grpc"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2146,21 +2193,57 @@ def test_fetch_acl(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: + with mock.patch.object(type(client.transport.lock_document), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = document_service.FetchAclResponse() - response = client.fetch_acl(request) + call.return_value = gcc_document.Document( + name="name_value", + reference_id="reference_id_value", + display_name="display_name_value", + title="title_value", + display_uri="display_uri_value", + document_schema_name="document_schema_name_value", + structured_content_uri="structured_content_uri_value", + raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF, + async_enabled=True, + content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE, + text_extraction_disabled=True, + text_extraction_enabled=True, + creator="creator_value", + updater="updater_value", + plain_text="plain_text_value", + raw_document_path="raw_document_path_value", + ) + response = client.lock_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == document_service_request.FetchAclRequest() + assert args[0] == document_service_request.LockDocumentRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, document_service.FetchAclResponse) + assert isinstance(response, gcc_document.Document) + assert response.name == "name_value" + assert response.reference_id == "reference_id_value" + assert response.display_name == "display_name_value" + assert response.title == "title_value" + assert response.display_uri == "display_uri_value" + assert response.document_schema_name == "document_schema_name_value" + assert response.structured_content_uri == "structured_content_uri_value" + assert ( + response.raw_document_file_type + == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF + ) + assert response.async_enabled is True + assert ( + response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE + ) + assert response.text_extraction_disabled is True + assert response.text_extraction_enabled is True + assert response.creator == "creator_value" + assert response.updater == "updater_value" -def test_fetch_acl_empty_call(): +def test_lock_document_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DocumentServiceClient( @@ -2169,17 +2252,17 @@ def test_fetch_acl_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: - client.fetch_acl() + with mock.patch.object(type(client.transport.lock_document), "__call__") as call: + client.lock_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == document_service_request.FetchAclRequest() + assert args[0] == document_service_request.LockDocumentRequest() @pytest.mark.asyncio -async def test_fetch_acl_async( +async def test_lock_document_async( transport: str = "grpc_asyncio", - request_type=document_service_request.FetchAclRequest, + request_type=document_service_request.LockDocumentRequest, ): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2191,42 +2274,76 @@ async def test_fetch_acl_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: + with mock.patch.object(type(client.transport.lock_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document_service.FetchAclResponse() + gcc_document.Document( + name="name_value", + reference_id="reference_id_value", + display_name="display_name_value", + title="title_value", + display_uri="display_uri_value", + document_schema_name="document_schema_name_value", + structured_content_uri="structured_content_uri_value", + raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF, + async_enabled=True, + content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE, + text_extraction_disabled=True, + text_extraction_enabled=True, + creator="creator_value", + updater="updater_value", + ) ) - response = await client.fetch_acl(request) + response = await client.lock_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == document_service_request.FetchAclRequest() + assert args[0] == document_service_request.LockDocumentRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, document_service.FetchAclResponse) + assert isinstance(response, gcc_document.Document) + assert response.name == "name_value" + assert response.reference_id == "reference_id_value" + assert response.display_name == "display_name_value" + assert response.title == "title_value" + assert response.display_uri == "display_uri_value" + assert response.document_schema_name == "document_schema_name_value" + assert response.structured_content_uri == "structured_content_uri_value" + assert ( + response.raw_document_file_type + == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF + ) + assert response.async_enabled is True + assert ( + response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE + ) + assert response.text_extraction_disabled is True + assert response.text_extraction_enabled is True + assert response.creator == "creator_value" + assert response.updater == "updater_value" @pytest.mark.asyncio -async def test_fetch_acl_async_from_dict(): - await test_fetch_acl_async(request_type=dict) +async def test_lock_document_async_from_dict(): + await test_lock_document_async(request_type=dict) -def test_fetch_acl_field_headers(): +def test_lock_document_field_headers(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = document_service_request.FetchAclRequest() + request = document_service_request.LockDocumentRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: - call.return_value = document_service.FetchAclResponse() - client.fetch_acl(request) + with mock.patch.object(type(client.transport.lock_document), "__call__") as call: + call.return_value = gcc_document.Document() + client.lock_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2237,28 +2354,28 @@ def test_fetch_acl_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_fetch_acl_field_headers_async(): +async def test_lock_document_field_headers_async(): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = document_service_request.FetchAclRequest() + request = document_service_request.LockDocumentRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: + with mock.patch.object(type(client.transport.lock_document), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document_service.FetchAclResponse() + gcc_document.Document() ) - await client.fetch_acl(request) + await client.lock_document(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2269,35 +2386,35 @@ async def test_fetch_acl_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] -def test_fetch_acl_flattened(): +def test_lock_document_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: + with mock.patch.object(type(client.transport.lock_document), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = document_service.FetchAclResponse() + call.return_value = gcc_document.Document() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.fetch_acl( - resource="resource_value", + client.lock_document( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_fetch_acl_flattened_error(): +def test_lock_document_flattened_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2305,43 +2422,43 @@ def test_fetch_acl_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.fetch_acl( - document_service_request.FetchAclRequest(), - resource="resource_value", + client.lock_document( + document_service_request.LockDocumentRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_fetch_acl_flattened_async(): +async def test_lock_document_flattened_async(): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: + with mock.patch.object(type(client.transport.lock_document), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = document_service.FetchAclResponse() + call.return_value = gcc_document.Document() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document_service.FetchAclResponse() + gcc_document.Document() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.fetch_acl( - resource="resource_value", + response = await client.lock_document( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_fetch_acl_flattened_error_async(): +async def test_lock_document_flattened_error_async(): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2349,20 +2466,20 @@ async def test_fetch_acl_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.fetch_acl( - document_service_request.FetchAclRequest(), - resource="resource_value", + await client.lock_document( + document_service_request.LockDocumentRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - document_service_request.SetAclRequest, + document_service_request.FetchAclRequest, dict, ], ) -def test_set_acl(request_type, transport: str = "grpc"): +def test_fetch_acl(request_type, transport: str = "grpc"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2373,21 +2490,21 @@ def test_set_acl(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = document_service.SetAclResponse() - response = client.set_acl(request) + call.return_value = document_service.FetchAclResponse() + response = client.fetch_acl(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == document_service_request.SetAclRequest() + assert args[0] == document_service_request.FetchAclRequest() # Establish that the response is the type that we expect. - assert isinstance(response, document_service.SetAclResponse) + assert isinstance(response, document_service.FetchAclResponse) -def test_set_acl_empty_call(): +def test_fetch_acl_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DocumentServiceClient( @@ -2396,16 +2513,17 @@ def test_set_acl_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_acl), "__call__") as call: - client.set_acl() + with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: + client.fetch_acl() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == document_service_request.SetAclRequest() + assert args[0] == document_service_request.FetchAclRequest() @pytest.mark.asyncio -async def test_set_acl_async( - transport: str = "grpc_asyncio", request_type=document_service_request.SetAclRequest +async def test_fetch_acl_async( + transport: str = "grpc_asyncio", + request_type=document_service_request.FetchAclRequest, ): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2417,42 +2535,42 @@ async def test_set_acl_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document_service.SetAclResponse() + document_service.FetchAclResponse() ) - response = await client.set_acl(request) + response = await client.fetch_acl(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == document_service_request.SetAclRequest() + assert args[0] == document_service_request.FetchAclRequest() # Establish that the response is the type that we expect. - assert isinstance(response, document_service.SetAclResponse) + assert isinstance(response, document_service.FetchAclResponse) @pytest.mark.asyncio -async def test_set_acl_async_from_dict(): - await test_set_acl_async(request_type=dict) +async def test_fetch_acl_async_from_dict(): + await test_fetch_acl_async(request_type=dict) -def test_set_acl_field_headers(): +def test_fetch_acl_field_headers(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = document_service_request.SetAclRequest() + request = document_service_request.FetchAclRequest() request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_acl), "__call__") as call: - call.return_value = document_service.SetAclResponse() - client.set_acl(request) + with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: + call.return_value = document_service.FetchAclResponse() + client.fetch_acl(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2468,23 +2586,23 @@ def test_set_acl_field_headers(): @pytest.mark.asyncio -async def test_set_acl_field_headers_async(): +async def test_fetch_acl_field_headers_async(): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = document_service_request.SetAclRequest() + request = document_service_request.FetchAclRequest() request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document_service.SetAclResponse() + document_service.FetchAclResponse() ) - await client.set_acl(request) + await client.fetch_acl(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2499,20 +2617,19 @@ async def test_set_acl_field_headers_async(): ) in kw["metadata"] -def test_set_acl_flattened(): +def test_fetch_acl_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = document_service.SetAclResponse() + call.return_value = document_service.FetchAclResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.set_acl( + client.fetch_acl( resource="resource_value", - policy=policy_pb2.Policy(version=774), ) # Establish that the underlying call was made with the expected @@ -2522,12 +2639,9 @@ def test_set_acl_flattened(): arg = args[0].resource mock_val = "resource_value" assert arg == mock_val - arg = args[0].policy - mock_val = policy_pb2.Policy(version=774) - assert arg == mock_val -def test_set_acl_flattened_error(): +def test_fetch_acl_flattened_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2535,32 +2649,30 @@ def test_set_acl_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_acl( - document_service_request.SetAclRequest(), + client.fetch_acl( + document_service_request.FetchAclRequest(), resource="resource_value", - policy=policy_pb2.Policy(version=774), ) @pytest.mark.asyncio -async def test_set_acl_flattened_async(): +async def test_fetch_acl_flattened_async(): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + with mock.patch.object(type(client.transport.fetch_acl), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = document_service.SetAclResponse() + call.return_value = document_service.FetchAclResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document_service.SetAclResponse() + document_service.FetchAclResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.set_acl( + response = await client.fetch_acl( resource="resource_value", - policy=policy_pb2.Policy(version=774), ) # Establish that the underlying call was made with the expected @@ -2570,13 +2682,10 @@ async def test_set_acl_flattened_async(): arg = args[0].resource mock_val = "resource_value" assert arg == mock_val - arg = args[0].policy - mock_val = policy_pb2.Policy(version=774) - assert arg == mock_val @pytest.mark.asyncio -async def test_set_acl_flattened_error_async(): +async def test_fetch_acl_flattened_error_async(): client = DocumentServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2584,14 +2693,2543 @@ async def test_set_acl_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.set_acl( - document_service_request.SetAclRequest(), + await client.fetch_acl( + document_service_request.FetchAclRequest(), resource="resource_value", - policy=policy_pb2.Policy(version=774), ) -def test_credentials_transport_error(): +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.SetAclRequest, + dict, + ], +) +def test_set_acl(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = document_service.SetAclResponse() + response = client.set_acl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == document_service_request.SetAclRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.SetAclResponse) + + +def test_set_acl_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + client.set_acl() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service_request.SetAclRequest() + + +@pytest.mark.asyncio +async def test_set_acl_async( + transport: str = "grpc_asyncio", request_type=document_service_request.SetAclRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.SetAclResponse() + ) + response = await client.set_acl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == document_service_request.SetAclRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.SetAclResponse) + + +@pytest.mark.asyncio +async def test_set_acl_async_from_dict(): + await test_set_acl_async(request_type=dict) + + +def test_set_acl_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service_request.SetAclRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + call.return_value = document_service.SetAclResponse() + client.set_acl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_acl_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
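+    # The routing value travels in metadata as the "x-goog-request-params"
+    # header (here "resource=resource_value"), as asserted below.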
+ request = document_service_request.SetAclRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.SetAclResponse() + ) + await client.set_acl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_acl_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.SetAclResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_acl( + resource="resource_value", + policy=policy_pb2.Policy(version=774), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].policy + mock_val = policy_pb2.Policy(version=774) + assert arg == mock_val + + +def test_set_acl_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_acl( + document_service_request.SetAclRequest(), + resource="resource_value", + policy=policy_pb2.Policy(version=774), + ) + + +@pytest.mark.asyncio +async def test_set_acl_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_acl), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.SetAclResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.SetAclResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_acl( + resource="resource_value", + policy=policy_pb2.Policy(version=774), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].policy + mock_val = policy_pb2.Policy(version=774) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_acl_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.set_acl( + document_service_request.SetAclRequest(), + resource="resource_value", + policy=policy_pb2.Policy(version=774), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.CreateDocumentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.CreateDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.CreateDocumentResponse) + + +def test_create_document_rest_required_fields( + request_type=document_service_request.CreateDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_service.CreateDocumentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
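+            # (Editor's note, illustrative only: in a real call transcode()
+            # would match the request against the method's http_options, e.g.
+            # mapping CreateDocumentRequest onto something like
+            # "post /v1/{parent=projects/*/locations/*}/documents"; the fixed
+            # "v1/sample_method" stub below keeps this test independent of the
+            # actual URI template.)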
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_service.CreateDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "document", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_create_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_create_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service_request.CreateDocumentRequest.pb( + document_service_request.CreateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_service.CreateDocumentResponse.to_json( + document_service.CreateDocumentResponse() + ) + + request = document_service_request.CreateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_service.CreateDocumentResponse() + + client.create_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service_request.CreateDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
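+    # (Editor's note, illustrative only: google.api_core translates an HTTP
+    # 400 status into core_exceptions.BadRequest, so a bare 400 Response is
+    # enough to trigger the pytest.raises block below.)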
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_document(request) + + +def test_create_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.CreateDocumentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + document=gcc_document.Document(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.CreateDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/documents" % client.transport._host, + args[1], + ) + + +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + document_service_request.CreateDocumentRequest(), + parent="parent_value", + document=gcc_document.Document(name="name_value"), + ) + + +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.GetDocumentRequest, + dict, + ], +) +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
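+        # (Editor's note, illustrative only: some of the fields populated
+        # below, e.g. plain_text and raw_document_path, appear to belong to
+        # proto oneof groups, which is why they are set here but not
+        # re-checked in the response assertions further down.)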
+ return_value = gcc_document.Document( + name="name_value", + reference_id="reference_id_value", + display_name="display_name_value", + title="title_value", + display_uri="display_uri_value", + document_schema_name="document_schema_name_value", + structured_content_uri="structured_content_uri_value", + raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF, + async_enabled=True, + content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE, + text_extraction_disabled=True, + text_extraction_enabled=True, + creator="creator_value", + updater="updater_value", + plain_text="plain_text_value", + raw_document_path="raw_document_path_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcc_document.Document) + assert response.name == "name_value" + assert response.reference_id == "reference_id_value" + assert response.display_name == "display_name_value" + assert response.title == "title_value" + assert response.display_uri == "display_uri_value" + assert response.document_schema_name == "document_schema_name_value" + assert response.structured_content_uri == "structured_content_uri_value" + assert ( + response.raw_document_file_type + == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF + ) + assert response.async_enabled is True + assert ( + response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE + ) + assert response.text_extraction_disabled is True + assert response.text_extraction_enabled is True + assert response.creator == "creator_value" + assert response.updater == "updater_value" + + +def test_get_document_rest_required_fields( + request_type=document_service_request.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcc_document.Document() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcc_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service_request.GetDocumentRequest.pb( + document_service_request.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcc_document.Document.to_json( + gcc_document.Document() + ) + + request = document_service_request.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcc_document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service_request.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcc_document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documents/*}:get" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + document_service_request.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.UpdateDocumentRequest, + dict, + ], +) +def test_update_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.UpdateDocumentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.UpdateDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document_service.UpdateDocumentResponse) + + +def test_update_document_rest_required_fields( + request_type=document_service_request.UpdateDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_service.UpdateDocumentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_service.UpdateDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "document", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_update_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_update_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service_request.UpdateDocumentRequest.pb( + document_service_request.UpdateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_service.UpdateDocumentResponse.to_json( + document_service.UpdateDocumentResponse() + ) + + request = document_service_request.UpdateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_service.UpdateDocumentResponse() + + client.update_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service_request.UpdateDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_document(request) + + +def test_update_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.UpdateDocumentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + document=gcc_document.Document(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.UpdateDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documents/*}" % client.transport._host, + args[1], + ) + + +def test_update_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_document( + document_service_request.UpdateDocumentRequest(), + name="name_value", + document=gcc_document.Document(name="name_value"), + ) + + +def test_update_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_document(request) + + # Establish that the response is the type that we expect. 
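+    # (Editor's note, illustrative only: DeleteDocument is assumed to return
+    # google.protobuf.Empty, which the generated client surfaces as None;
+    # hence the empty JSON body above and the None assertion below.)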
+ assert response is None + + +def test_delete_document_rest_required_fields( + request_type=document_service_request.DeleteDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_delete_document" + ) as pre: + pre.assert_not_called() + pb_message = document_service_request.DeleteDocumentRequest.pb( + document_service_request.DeleteDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = document_service_request.DeleteDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service_request.DeleteDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_document(request) + + +def test_delete_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documents/*}:delete" + % client.transport._host, + args[1], + ) + + +def test_delete_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + document_service_request.DeleteDocumentRequest(), + name="name_value", + ) + + +def test_delete_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.SearchDocumentsRequest, + dict, + ], +) +def test_search_documents_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.SearchDocumentsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.SearchDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_documents(request) + + # Establish that the response is the type that we expect. 
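+    # (Editor's note, illustrative only: search_documents wraps the HTTP
+    # response in a SearchDocumentsPager; attribute reads such as
+    # next_page_token and total_size are forwarded to the underlying first
+    # response, while iterating the pager triggers further page fetches.)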
+ assert isinstance(response, pagers.SearchDocumentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_search_documents_rest_required_fields( + request_type=document_service_request.SearchDocumentsRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_service.SearchDocumentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_service.SearchDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_documents_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_documents_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_search_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_search_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service_request.SearchDocumentsRequest.pb( + document_service_request.SearchDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_service.SearchDocumentsResponse.to_json( + document_service.SearchDocumentsResponse() + ) + + request = document_service_request.SearchDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_service.SearchDocumentsResponse() + + client.search_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_documents_rest_bad_request( + transport: str = "rest", + request_type=document_service_request.SearchDocumentsRequest, +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_documents(request) + + +def test_search_documents_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.SearchDocumentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.SearchDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.search_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/documents:search" + % client.transport._host, + args[1], + ) + + +def test_search_documents_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_documents( + document_service_request.SearchDocumentsRequest(), + parent="parent_value", + ) + + +def test_search_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
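+        # (Editor's note, illustrative only: the canned page sequence below is
+        # doubled via "response = response + response" because the test walks
+        # the pager twice, once with list(pager) and once via .pages.)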
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.SearchDocumentsResponse( + matching_documents=[ + document_service.SearchDocumentsResponse.MatchingDocument(), + document_service.SearchDocumentsResponse.MatchingDocument(), + document_service.SearchDocumentsResponse.MatchingDocument(), + ], + next_page_token="abc", + ), + document_service.SearchDocumentsResponse( + matching_documents=[], + next_page_token="def", + ), + document_service.SearchDocumentsResponse( + matching_documents=[ + document_service.SearchDocumentsResponse.MatchingDocument(), + ], + next_page_token="ghi", + ), + document_service.SearchDocumentsResponse( + matching_documents=[ + document_service.SearchDocumentsResponse.MatchingDocument(), + document_service.SearchDocumentsResponse.MatchingDocument(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.SearchDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.search_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, document_service.SearchDocumentsResponse.MatchingDocument) + for i in results + ) + + pages = list(client.search_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.LockDocumentRequest, + dict, + ], +) +def test_lock_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcc_document.Document( + name="name_value", + reference_id="reference_id_value", + display_name="display_name_value", + title="title_value", + display_uri="display_uri_value", + document_schema_name="document_schema_name_value", + structured_content_uri="structured_content_uri_value", + raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF, + async_enabled=True, + content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE, + text_extraction_disabled=True, + text_extraction_enabled=True, + creator="creator_value", + updater="updater_value", + plain_text="plain_text_value", + raw_document_path="raw_document_path_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lock_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcc_document.Document) + assert response.name == "name_value" + assert response.reference_id == "reference_id_value" + assert response.display_name == "display_name_value" + assert response.title == "title_value" + assert response.display_uri == "display_uri_value" + assert response.document_schema_name == "document_schema_name_value" + assert response.structured_content_uri == "structured_content_uri_value" + assert ( + response.raw_document_file_type + == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF + ) + assert response.async_enabled is True + assert ( + response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE + ) + assert response.text_extraction_disabled is True + assert response.text_extraction_enabled is True + assert response.creator == "creator_value" + assert response.updater == "updater_value" + + +def test_lock_document_rest_required_fields( + request_type=document_service_request.LockDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lock_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lock_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcc_document.Document() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcc_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lock_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lock_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.lock_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lock_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_lock_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_lock_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service_request.LockDocumentRequest.pb( + document_service_request.LockDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcc_document.Document.to_json( + gcc_document.Document() + ) + + request = document_service_request.LockDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcc_document.Document() + + client.lock_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_lock_document_rest_bad_request( + transport: str = "rest", request_type=document_service_request.LockDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lock_document(request) + + +def test_lock_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcc_document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcc_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lock_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/documents/*}:lock" + % client.transport._host, + args[1], + ) + + +def test_lock_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lock_document( + document_service_request.LockDocumentRequest(), + name="name_value", + ) + + +def test_lock_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.FetchAclRequest, + dict, + ], +) +def test_fetch_acl_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.FetchAclResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.FetchAclResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_acl(request) + + # Establish that the response is the type that we expect. 
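+    # The isinstance check matters because the REST transport decodes the
+    # JSON body back into a proto-plus message rather than a dict. A sketch
+    # of the serialization the mock relies on (`.pb()` unwraps the proto-plus
+    # message so `json_format` can serialize it):
+    #
+    #     msg = document_service.FetchAclResponse()
+    #     json_str = json_format.MessageToJson(document_service.FetchAclResponse.pb(msg))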
+ assert isinstance(response, document_service.FetchAclResponse) + + +def test_fetch_acl_rest_required_fields( + request_type=document_service_request.FetchAclRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_acl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_acl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_service.FetchAclResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
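+            # What the stubbed transcode() stands in for (a sketch of its
+            # contract, not the real implementation): it matches the request
+            # against the method's http_options and splits it into URI, verb,
+            # body and query params, e.g.
+            #
+            #     {
+            #         "uri": "v1/{resource=projects/*/locations/*/documents/*}:fetchAcl",
+            #         "method": "post",
+            #         "body": ...,
+            #         "query_params": ...,
+            #     }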
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_service.FetchAclResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_acl(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_acl_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_acl._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_acl_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_fetch_acl" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_fetch_acl" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service_request.FetchAclRequest.pb( + document_service_request.FetchAclRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_service.FetchAclResponse.to_json( + document_service.FetchAclResponse() + ) + + request = document_service_request.FetchAclRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_service.FetchAclResponse() + + client.fetch_acl( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_acl_rest_bad_request( + transport: str = "rest", request_type=document_service_request.FetchAclRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_acl(request) + + +def test_fetch_acl_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.FetchAclResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/documents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.FetchAclResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_acl(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{resource=projects/*/locations/*/documents/*}:fetchAcl" + % client.transport._host, + args[1], + ) + + +def test_fetch_acl_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_acl( + document_service_request.FetchAclRequest(), + resource="resource_value", + ) + + +def test_fetch_acl_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service_request.SetAclRequest, + dict, + ], +) +def test_set_acl_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.SetAclResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.SetAclResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_acl(request) + + # Establish that the response is the type that we expect. 
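+    # The response type wraps the applied IAM policy. Note that google.iam.v1
+    # types are plain protobuf rather than proto-plus, so in these tests they
+    # are built directly (a sketch; the values are arbitrary):
+    #
+    #     policy = policy_pb2.Policy(version=774)
+    #     policy.bindings.add(role="roles/viewer", members=["user:a@example.com"])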
+ assert isinstance(response, document_service.SetAclResponse) + + +def test_set_acl_rest_required_fields( + request_type=document_service_request.SetAclRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_acl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_acl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_service.SetAclResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_service.SetAclResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_acl(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_acl_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_acl._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_acl_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_set_acl" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_set_acl" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service_request.SetAclRequest.pb( + document_service_request.SetAclRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_service.SetAclResponse.to_json( + document_service.SetAclResponse() + ) + + request = document_service_request.SetAclRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_service.SetAclResponse() + + client.set_acl( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_acl_rest_bad_request( + transport: str = "rest", request_type=document_service_request.SetAclRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/documents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
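+    # The next statement stacks a patch and an exception assertion in one
+    # `with` (standard mock/pytest behaviour, sketched here for clarity);
+    # it is equivalent to:
+    #
+    #     with mock.patch.object(Session, "request") as req:
+    #         with pytest.raises(core_exceptions.BadRequest):
+    #             ...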
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_acl(request)
+
+
+def test_set_acl_rest_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_service.SetAclResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "resource": "projects/sample1/locations/sample2/documents/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            resource="resource_value",
+            policy=policy_pb2.Policy(version=774),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = document_service.SetAclResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.set_acl(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{resource=projects/*/locations/*/documents/*}:setAcl"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_set_acl_rest_flattened_error(transport: str = "rest"):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_acl(
+            document_service_request.SetAclRequest(),
+            resource="resource_value",
+            policy=policy_pb2.Policy(version=774),
+        )
+
+
+def test_set_acl_rest_error():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
 def test_credentials_transport_error():
     # It is an error to provide credentials and a transport instance.
transport = transports.DocumentServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2672,6 +5310,7 @@ def test_transport_get_channel(): [ transports.DocumentServiceGrpcTransport, transports.DocumentServiceGrpcAsyncIOTransport, + transports.DocumentServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -2686,6 +5325,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -2733,8 +5373,10 @@ def test_document_service_base_transport(): "update_document", "delete_document", "search_documents", + "lock_document", "fetch_acl", "set_acl", + "get_operation", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2821,6 +5463,7 @@ def test_document_service_transport_auth_adc(transport_class): [ transports.DocumentServiceGrpcTransport, transports.DocumentServiceGrpcAsyncIOTransport, + transports.DocumentServiceRestTransport, ], ) def test_document_service_transport_auth_gdch_credentials(transport_class): @@ -2918,11 +5561,23 @@ def test_document_service_grpc_transport_client_cert_source_for_mtls(transport_c ) +def test_document_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DocumentServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_service_host_no_port(transport_name): @@ -2933,7 +5588,11 @@ def test_document_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -2941,6 +5600,7 @@ def test_document_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_service_host_with_port(transport_name): @@ -2951,7 +5611,54 @@ def test_document_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:8000") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_document_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DocumentServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DocumentServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_document._session + session2 = client2.transport.create_document._session + assert session1 != session2 + session1 = client1.transport.get_document._session + session2 = client2.transport.get_document._session + assert session1 != session2 + session1 = client1.transport.update_document._session + session2 = client2.transport.update_document._session + assert session1 != session2 + session1 = 
client1.transport.delete_document._session + session2 = client2.transport.delete_document._session + assert session1 != session2 + session1 = client1.transport.search_documents._session + session2 = client2.transport.search_documents._session + assert session1 != session2 + session1 = client1.transport.lock_document._session + session2 = client2.transport.lock_document._session + assert session1 != session2 + session1 = client1.transport.fetch_acl._session + session2 = client2.transport.fetch_acl._session + assert session1 != session2 + session1 = client1.transport.set_acl._session + session2 = client2.transport.set_acl._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): @@ -3297,8 +6004,212 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
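+        # `mock_calls` entries unpack as (name, args, kwargs) triples, which
+        # is what the assertions below rely on (a sketch of the mock API):
+        #
+        #     _, args, _ = call.mock_calls[0]
+        #     args[0]    # the positional request object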
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
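+        # Passing a dict as `request` works because GAPIC methods coerce dict
+        # requests into the request type (a sketch of the equivalence, assuming
+        # standard proto message kwargs):
+        #
+        #     client.get_operation(request={"name": "locations"})
+        #     # behaves like:
+        #     client.get_operation(operations_pb2.GetOperationRequest(name="locations"))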
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -3316,6 +6227,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py index a63b8b2e1d07..c7d242715dfe 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -33,12 +35,15 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import json_format from google.type import expr_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.contentwarehouse_v1.services.rule_set_service import ( RuleSetServiceAsyncClient, @@ -98,6 +103,7 @@ def test__get_default_mtls_endpoint(): [ (RuleSetServiceClient, "grpc"), (RuleSetServiceAsyncClient, "grpc_asyncio"), + (RuleSetServiceClient, "rest"), ], ) def test_rule_set_service_client_from_service_account_info( @@ -113,7 +119,11 @@ def test_rule_set_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -121,6 +131,7 @@ def test_rule_set_service_client_from_service_account_info( [ (transports.RuleSetServiceGrpcTransport, "grpc"), (transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RuleSetServiceRestTransport, "rest"), ], ) def test_rule_set_service_client_service_account_always_use_jwt( @@ -146,6 +157,7 @@ def test_rule_set_service_client_service_account_always_use_jwt( [ (RuleSetServiceClient, "grpc"), (RuleSetServiceAsyncClient, "grpc_asyncio"), + (RuleSetServiceClient, "rest"), ], ) def test_rule_set_service_client_from_service_account_file( @@ -168,13 +180,18 @@ 
def test_rule_set_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) def test_rule_set_service_client_get_transport_class(): transport = RuleSetServiceClient.get_transport_class() available_transports = [ transports.RuleSetServiceGrpcTransport, + transports.RuleSetServiceRestTransport, ] assert transport in available_transports @@ -191,6 +208,7 @@ def test_rule_set_service_client_get_transport_class(): transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -336,6 +354,8 @@ def test_rule_set_service_client_client_options( "grpc_asyncio", "false", ), + (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest", "true"), + (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -535,6 +555,7 @@ def test_rule_set_service_client_get_mtls_endpoint_and_cert_source(client_class) transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest"), ], ) def test_rule_set_service_client_client_options_scopes( @@ -575,6 +596,7 @@ def test_rule_set_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest", None), ], ) def test_rule_set_service_client_client_options_credentials_file( @@ -2060,210 +2082,1767 @@ async def test_list_rule_sets_async_pages(): assert page_.raw_page.next_page_token == token -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.RuleSetServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + ruleset_service_request.CreateRuleSetRequest, + dict, + ], +) +def test_create_rule_set_rest(request_type): + client = RuleSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = RuleSetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["rule_set"] = { + "name": "name_value", + "description": "description_value", + "source": "source_value", + "rules": [ + { + "description": "description_value", + "rule_id": "rule_id_value", + "trigger_type": 1, + "condition": "condition_value", + "actions": [ + { + "action_id": "action_id_value", + "access_control": { + "operation_type": 1, + "policy": { + "version": 774, + "bindings": [ + { + "role": "role_value", + "members": ["members_value1", "members_value2"], + "condition": { + "expression": "expression_value", + "title": "title_value", + "description": "description_value", + "location": "location_value", + }, + } + ], + "audit_configs": [ + { + "service": "service_value", + "audit_log_configs": [ + { + "log_type": 1, + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + } + ], + } + ], + "etag": b"etag_blob", + }, + }, + "data_validation": {"conditions": {}}, + "data_update": {"entries": {}}, + "add_to_folder": { + "folders": ["folders_value1", "folders_value2"] + }, + "publish_to_pub_sub": { + "topic_id": "topic_id_value", + "messages": ["messages_value1", "messages_value2"], + }, + "remove_from_folder_action": { + "condition": "condition_value", + "folder": "folder_value", + }, + "delete_document_action": {"enable_hard_delete": True}, + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = rule_engine.RuleSet( + name="name_value", + description="description_value", + source="source_value", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.RuleSetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuleSetServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_rule_set(request) + + # Establish that the response is the type that we expect. 
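+    # The scalar echoes below exercise the decode path end to end: the values
+    # set on `return_value` above were serialized with MessageToJson, returned
+    # through the mocked session, and re-parsed into `response`. Roughly:
+    #
+    #     rule_engine.RuleSet(name="name_value") -> '{"name": "name_value"}' -> response.name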
+ assert isinstance(response, rule_engine.RuleSet) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.source == "source_value" + + +def test_create_rule_set_rest_required_fields( + request_type=ruleset_service_request.CreateRuleSetRequest, +): + transport_class = transports.RuleSetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.RuleSetServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RuleSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = rule_engine.RuleSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_rule_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_rule_set_rest_unset_required_fields(): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RuleSetServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. 
-    options = mock.Mock()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = RuleSetServiceClient(
-            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+    unset_fields = transport.create_rule_set._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "ruleSet",
+            )
         )
+    )
 
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.RuleSetServiceGrpcTransport(
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_rule_set_rest_interceptors(null_interceptor):
+    transport = transports.RuleSetServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.RuleSetServiceRestInterceptor(),
     )
-    with pytest.raises(ValueError):
-        client = RuleSetServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
+    client = RuleSetServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.RuleSetServiceRestInterceptor, "post_create_rule_set"
+    ) as post, mock.patch.object(
+        transports.RuleSetServiceRestInterceptor, "pre_create_rule_set"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = ruleset_service_request.CreateRuleSetRequest.pb(
+            ruleset_service_request.CreateRuleSetRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = rule_engine.RuleSet.to_json(rule_engine.RuleSet())
+
+        request = ruleset_service_request.CreateRuleSetRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = rule_engine.RuleSet()
+
+        client.create_rule_set(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()


-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
- transport = transports.RuleSetServiceGrpcTransport( + +def test_create_rule_set_rest_bad_request( + transport: str = "rest", request_type=ruleset_service_request.CreateRuleSetRequest +): + client = RuleSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = RuleSetServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["rule_set"] = { + "name": "name_value", + "description": "description_value", + "source": "source_value", + "rules": [ + { + "description": "description_value", + "rule_id": "rule_id_value", + "trigger_type": 1, + "condition": "condition_value", + "actions": [ + { + "action_id": "action_id_value", + "access_control": { + "operation_type": 1, + "policy": { + "version": 774, + "bindings": [ + { + "role": "role_value", + "members": ["members_value1", "members_value2"], + "condition": { + "expression": "expression_value", + "title": "title_value", + "description": "description_value", + "location": "location_value", + }, + } + ], + "audit_configs": [ + { + "service": "service_value", + "audit_log_configs": [ + { + "log_type": 1, + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + } + ], + } + ], + "etag": b"etag_blob", + }, + }, + "data_validation": {"conditions": {}}, + "data_update": {"entries": {}}, + "add_to_folder": { + "folders": ["folders_value1", "folders_value2"] + }, + "publish_to_pub_sub": { + "topic_id": "topic_id_value", + "messages": ["messages_value1", "messages_value2"], + }, + "remove_from_folder_action": { + "condition": "condition_value", + "folder": "folder_value", + }, + "delete_document_action": {"enable_hard_delete": True}, + } + ], + } + ], + } + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.RuleSetServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_rule_set(request) + + +def test_create_rule_set_rest_flattened(): + client = RuleSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.RuleSetServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
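+        # Flattened-call mechanics (sketched; not the client's literal code):
+        # keyword arguments are folded into a request object before
+        # transcoding, so the URL assertion at the end of this test sees the
+        # same path as an explicit
+        #
+        #     ruleset_service_request.CreateRuleSetRequest(
+        #         parent="parent_value",
+        #         rule_set=rule_engine.RuleSet(name="name_value"),
+        #     )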
+ return_value = rule_engine.RuleSet() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + rule_set=rule_engine.RuleSet(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_rule_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/ruleSets" % client.transport._host, + args[1], + ) + + +def test_create_rule_set_rest_flattened_error(transport: str = "rest"): + client = RuleSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_rule_set( + ruleset_service_request.CreateRuleSetRequest(), + parent="parent_value", + rule_set=rule_engine.RuleSet(name="name_value"), + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuleSetServiceGrpcTransport, - transports.RuleSetServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_create_rule_set_rest_error(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + ruleset_service_request.GetRuleSetRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = RuleSetServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_get_rule_set_rest(request_type): client = RuleSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.RuleSetServiceGrpcTransport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + request = request_type(**request_init) -def test_rule_set_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.RuleSetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = rule_engine.RuleSet( + name="name_value", + description="description_value", + source="source_value", ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_rule_set_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.RuleSetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_rule_set(request) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_rule_set", - "get_rule_set", - "update_rule_set", - "delete_rule_set", - "list_rule_sets", + # Establish that the response is the type that we expect. + assert isinstance(response, rule_engine.RuleSet) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.source == "source_value" + + +def test_get_rule_set_rest_required_fields( + request_type=ruleset_service_request.GetRuleSetRequest, +): + transport_class = transports.RuleSetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # verify fields with default values are dropped - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_rule_set_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuleSetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) + jsonified_request["name"] = "name_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -def test_rule_set_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and 
credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuleSetServiceTransport() - adc.assert_called_once() + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = rule_engine.RuleSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_rule_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rule_set_rest_unset_required_fields(): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -def test_rule_set_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RuleSetServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) + unset_fields = transport.get_rule_set._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rule_set_rest_interceptors(null_interceptor): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuleSetServiceRestInterceptor(), + ) + client = RuleSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "post_get_rule_set" + ) as post, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "pre_get_rule_set" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ruleset_service_request.GetRuleSetRequest.pb( + ruleset_service_request.GetRuleSetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = rule_engine.RuleSet.to_json(rule_engine.RuleSet()) + + request = ruleset_service_request.GetRuleSetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = rule_engine.RuleSet() + + client.get_rule_set( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rule_set_rest_bad_request( + transport: str = "rest", request_type=ruleset_service_request.GetRuleSetRequest +): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule_set(request) + + +def test_get_rule_set_rest_flattened(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = rule_engine.RuleSet() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_rule_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/ruleSets/*}" % client.transport._host, + args[1], + ) + + +def test_get_rule_set_rest_flattened_error(transport: str = "rest"): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_rule_set( + ruleset_service_request.GetRuleSetRequest(), + name="name_value", + ) + + +def test_get_rule_set_rest_error(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ruleset_service_request.UpdateRuleSetRequest, + dict, + ], +) +def test_update_rule_set_rest(request_type): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = rule_engine.RuleSet( + name="name_value", + description="description_value", + source="source_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_rule_set(request) + + # Establish that the response is the type that we expect. 
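+    # The JSON payload is deserialized back into a proto-plus RuleSet,
+    # so the mocked field values should round-trip intact.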
+ assert isinstance(response, rule_engine.RuleSet) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.source == "source_value" + + +def test_update_rule_set_rest_required_fields( + request_type=ruleset_service_request.UpdateRuleSetRequest, +): + transport_class = transports.RuleSetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = rule_engine.RuleSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
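+            # (For this PATCH method the stubbed transcode result also
+            # carries a body, since the resource travels in the request body.)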
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_rule_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_rule_set_rest_unset_required_fields(): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_rule_set._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "ruleSet", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rule_set_rest_interceptors(null_interceptor): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuleSetServiceRestInterceptor(), + ) + client = RuleSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "post_update_rule_set" + ) as post, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "pre_update_rule_set" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ruleset_service_request.UpdateRuleSetRequest.pb( + ruleset_service_request.UpdateRuleSetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = rule_engine.RuleSet.to_json(rule_engine.RuleSet()) + + request = ruleset_service_request.UpdateRuleSetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = rule_engine.RuleSet() + + client.update_rule_set( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rule_set_rest_bad_request( + transport: str = "rest", request_type=ruleset_service_request.UpdateRuleSetRequest +): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
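+    # A 400 status on the mocked session surfaces to the caller as
+    # core_exceptions.BadRequest via the api_core HTTP error mapping.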
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_rule_set(request) + + +def test_update_rule_set_rest_flattened(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = rule_engine.RuleSet() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + rule_set=rule_engine.RuleSet(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rule_engine.RuleSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_rule_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/ruleSets/*}" % client.transport._host, + args[1], + ) + + +def test_update_rule_set_rest_flattened_error(transport: str = "rest"): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_rule_set( + ruleset_service_request.UpdateRuleSetRequest(), + name="name_value", + rule_set=rule_engine.RuleSet(name="name_value"), + ) + + +def test_update_rule_set_rest_error(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ruleset_service_request.DeleteRuleSetRequest, + dict, + ], +) +def test_delete_rule_set_rest(request_type): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_rule_set(request) + + # Establish that the response is the type that we expect. 
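+    # DeleteRuleSet has no response body, so the client simply returns None.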
+ assert response is None + + +def test_delete_rule_set_rest_required_fields( + request_type=ruleset_service_request.DeleteRuleSetRequest, +): + transport_class = transports.RuleSetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_rule_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_rule_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rule_set_rest_unset_required_fields(): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_rule_set._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rule_set_rest_interceptors(null_interceptor): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuleSetServiceRestInterceptor(), + ) + client = RuleSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "pre_delete_rule_set" + ) as pre: + pre.assert_not_called() + pb_message = ruleset_service_request.DeleteRuleSetRequest.pb( + ruleset_service_request.DeleteRuleSetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = ruleset_service_request.DeleteRuleSetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_rule_set( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_rule_set_rest_bad_request( + transport: str = "rest", request_type=ruleset_service_request.DeleteRuleSetRequest +): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_rule_set(request) + + +def test_delete_rule_set_rest_flattened(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
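+        # The delete call produces no payload; an empty JSON body stands in
+        # for the HTTP 200 response below.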
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/ruleSets/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_rule_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/ruleSets/*}" % client.transport._host, + args[1], + ) + + +def test_delete_rule_set_rest_flattened_error(transport: str = "rest"): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_rule_set( + ruleset_service_request.DeleteRuleSetRequest(), + name="name_value", + ) + + +def test_delete_rule_set_rest_error(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ruleset_service_request.ListRuleSetsRequest, + dict, + ], +) +def test_list_rule_sets_rest(request_type): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ruleset_service_request.ListRuleSetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ruleset_service_request.ListRuleSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_rule_sets(request) + + # Establish that the response is the type that we expect. 
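+    # list_rule_sets wraps the raw response in a pager that lazily fetches
+    # further pages via next_page_token.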
+ assert isinstance(response, pagers.ListRuleSetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_rule_sets_rest_required_fields( + request_type=ruleset_service_request.ListRuleSetsRequest, +): + transport_class = transports.RuleSetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rule_sets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rule_sets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ruleset_service_request.ListRuleSetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ruleset_service_request.ListRuleSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_rule_sets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rule_sets_rest_unset_required_fields(): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_rule_sets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rule_sets_rest_interceptors(null_interceptor): + transport = transports.RuleSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuleSetServiceRestInterceptor(), + ) + client = RuleSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "post_list_rule_sets" + ) as post, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "pre_list_rule_sets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ruleset_service_request.ListRuleSetsRequest.pb( + ruleset_service_request.ListRuleSetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + ruleset_service_request.ListRuleSetsResponse.to_json( + ruleset_service_request.ListRuleSetsResponse() + ) + ) + + request = ruleset_service_request.ListRuleSetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ruleset_service_request.ListRuleSetsResponse() + + client.list_rule_sets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rule_sets_rest_bad_request( + transport: str = "rest", request_type=ruleset_service_request.ListRuleSetsRequest +): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_rule_sets(request) + + +def test_list_rule_sets_rest_flattened(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ruleset_service_request.ListRuleSetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ruleset_service_request.ListRuleSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_rule_sets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/ruleSets" % client.transport._host, + args[1], + ) + + +def test_list_rule_sets_rest_flattened_error(transport: str = "rest"): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_rule_sets( + ruleset_service_request.ListRuleSetsRequest(), + parent="parent_value", + ) + + +def test_list_rule_sets_rest_pager(transport: str = "rest"): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + ruleset_service_request.ListRuleSetsResponse( + rule_sets=[ + rule_engine.RuleSet(), + rule_engine.RuleSet(), + rule_engine.RuleSet(), + ], + next_page_token="abc", + ), + ruleset_service_request.ListRuleSetsResponse( + rule_sets=[], + next_page_token="def", + ), + ruleset_service_request.ListRuleSetsResponse( + rule_sets=[ + rule_engine.RuleSet(), + ], + next_page_token="ghi", + ), + ruleset_service_request.ListRuleSetsResponse( + rule_sets=[ + rule_engine.RuleSet(), + rule_engine.RuleSet(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + ruleset_service_request.ListRuleSetsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_rule_sets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, rule_engine.RuleSet) for i in results) + + pages = list(client.list_rule_sets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RuleSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RuleSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuleSetServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RuleSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuleSetServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuleSetServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RuleSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuleSetServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
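+    # The client must adopt the supplied transport as-is rather than
+    # constructing a fresh one from defaults.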
+ transport = transports.RuleSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RuleSetServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.RuleSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RuleSetServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RuleSetServiceGrpcTransport, + transports.RuleSetServiceGrpcAsyncIOTransport, + transports.RuleSetServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = RuleSetServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RuleSetServiceGrpcTransport, + ) + + +def test_rule_set_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RuleSetServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_rule_set_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RuleSetServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
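+    # (The base class only defines the method surface; the concrete gRPC
+    # and REST transports supply the implementations.)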
+ methods = ( + "create_rule_set", + "get_rule_set", + "update_rule_set", + "delete_rule_set", + "list_rule_sets", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_rule_set_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuleSetServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_rule_set_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuleSetServiceTransport() + adc.assert_called_once() + + +def test_rule_set_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
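+    # ADC resolution goes through google.auth.default(), which is mocked
+    # here so no real environment lookup takes place.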
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RuleSetServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( "transport_class", [ transports.RuleSetServiceGrpcTransport, @@ -2288,6 +3867,7 @@ def test_rule_set_service_transport_auth_adc(transport_class): [ transports.RuleSetServiceGrpcTransport, transports.RuleSetServiceGrpcAsyncIOTransport, + transports.RuleSetServiceRestTransport, ], ) def test_rule_set_service_transport_auth_gdch_credentials(transport_class): @@ -2385,11 +3965,23 @@ def test_rule_set_service_grpc_transport_client_cert_source_for_mtls(transport_c ) +def test_rule_set_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RuleSetServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_rule_set_service_host_no_port(transport_name): @@ -2400,7 +3992,11 @@ def test_rule_set_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -2408,6 +4004,7 @@ def test_rule_set_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_rule_set_service_host_with_port(transport_name): @@ -2418,7 +4015,45 @@ def test_rule_set_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:8000") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_rule_set_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RuleSetServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RuleSetServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_rule_set._session + session2 = client2.transport.create_rule_set._session + assert session1 != session2 + session1 = client1.transport.get_rule_set._session + session2 = client2.transport.get_rule_set._session + assert session1 != session2 + session1 = client1.transport.update_rule_set._session + session2 = client2.transport.update_rule_set._session + assert session1 != session2 + session1 = client1.transport.delete_rule_set._session + session2 = client2.transport.delete_rule_set._session + assert session1 != session2 + session1 = client1.transport.list_rule_sets._session + session2 = client2.transport.list_rule_sets._session + assert session1 != session2 def test_rule_set_service_grpc_transport_channel(): @@ -2762,8 +4397,212 @@ async 
def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = RuleSetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
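+        # FakeUnaryUnaryCall makes the mocked stub awaitable, mirroring a
+        # real async unary-unary RPC.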
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = RuleSetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = RuleSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = RuleSetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2781,6 +4620,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py index c61f8afe4904..f9c90a9eccda 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -32,11 +34,14 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import json_format import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.contentwarehouse_v1.services.synonym_set_service import ( SynonymSetServiceAsyncClient, @@ -100,6 +105,7 @@ def test__get_default_mtls_endpoint(): [ (SynonymSetServiceClient, "grpc"), (SynonymSetServiceAsyncClient, "grpc_asyncio"), + (SynonymSetServiceClient, "rest"), ], ) def test_synonym_set_service_client_from_service_account_info( @@ -115,7 +121,11 @@ def test_synonym_set_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -123,6 +133,7 @@ def test_synonym_set_service_client_from_service_account_info( [ (transports.SynonymSetServiceGrpcTransport, "grpc"), (transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SynonymSetServiceRestTransport, "rest"), ], ) def test_synonym_set_service_client_service_account_always_use_jwt( @@ -148,6 +159,7 @@ def test_synonym_set_service_client_service_account_always_use_jwt( [ (SynonymSetServiceClient, "grpc"), (SynonymSetServiceAsyncClient, "grpc_asyncio"), + (SynonymSetServiceClient, "rest"), ], ) def test_synonym_set_service_client_from_service_account_file( @@ -170,13 +182,18 @@ def test_synonym_set_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) def test_synonym_set_service_client_get_transport_class(): transport = 
SynonymSetServiceClient.get_transport_class() available_transports = [ transports.SynonymSetServiceGrpcTransport, + transports.SynonymSetServiceRestTransport, ] assert transport in available_transports @@ -193,6 +210,7 @@ def test_synonym_set_service_client_get_transport_class(): transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -348,6 +366,18 @@ def test_synonym_set_service_client_client_options( "grpc_asyncio", "false", ), + ( + SynonymSetServiceClient, + transports.SynonymSetServiceRestTransport, + "rest", + "true", + ), + ( + SynonymSetServiceClient, + transports.SynonymSetServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -547,6 +577,7 @@ def test_synonym_set_service_client_get_mtls_endpoint_and_cert_source(client_cla transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest"), ], ) def test_synonym_set_service_client_client_options_scopes( @@ -587,6 +618,12 @@ def test_synonym_set_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + SynonymSetServiceClient, + transports.SynonymSetServiceRestTransport, + "rest", + None, + ), ], ) def test_synonym_set_service_client_client_options_credentials_file( @@ -2136,174 +2173,1642 @@ async def test_list_synonym_sets_async_pages(): assert page_.raw_page.next_page_token == token -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SynonymSetServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + synonymset_service_request.CreateSynonymSetRequest, + dict, + ], +) +def test_create_synonym_set_rest(request_type): + client = SynonymSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = SynonymSetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["synonym_set"] = { + "name": "name_value", + "context": "context_value", + "synonyms": [{"words": ["words_value1", "words_value2"]}], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet( + name="name_value", + context="context_value", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.SynonymSetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SynonymSetServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_synonym_set(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, synonymset.SynonymSet) + assert response.name == "name_value" + assert response.context == "context_value" + + +def test_create_synonym_set_rest_required_fields( + request_type=synonymset_service_request.CreateSynonymSetRequest, +): + transport_class = transports.SynonymSetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.SynonymSetServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SynonymSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_synonym_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_synonym_set_rest_unset_required_fields(): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SynonymSetServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SynonymSetServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + unset_fields = transport.create_synonym_set._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "synonymSet", + ) ) + ) - # It is an error to provide scopes and a transport instance. - transport = transports.SynonymSetServiceGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_synonym_set_rest_interceptors(null_interceptor): + transport = transports.SynonymSetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SynonymSetServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = SynonymSetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = SynonymSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "post_create_synonym_set" + ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "pre_create_synonym_set" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = synonymset_service_request.CreateSynonymSetRequest.pb( + synonymset_service_request.CreateSynonymSetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = synonymset.SynonymSet.to_json( + synonymset.SynonymSet() ) + request = synonymset_service_request.CreateSynonymSetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = synonymset.SynonymSet() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SynonymSetServiceGrpcTransport( + client.create_synonym_set( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_synonym_set_rest_bad_request( + transport: str = "rest", + request_type=synonymset_service_request.CreateSynonymSetRequest, +): + client = SynonymSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = SynonymSetServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["synonym_set"] = { + "name": "name_value", + "context": "context_value", + "synonyms": [{"words": ["words_value1", "words_value2"]}], + } + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SynonymSetServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_synonym_set(request) + + +def test_create_synonym_set_rest_flattened(): + client = SynonymSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.SynonymSetServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + synonym_set=synonymset.SynonymSet(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_synonym_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/synonymSets" + % client.transport._host, + args[1], + ) + + +def test_create_synonym_set_rest_flattened_error(transport: str = "rest"): + client = SynonymSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_synonym_set( + synonymset_service_request.CreateSynonymSetRequest(), + parent="parent_value", + synonym_set=synonymset.SynonymSet(name="name_value"), + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.SynonymSetServiceGrpcTransport, - transports.SynonymSetServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_create_synonym_set_rest_error(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + synonymset_service_request.GetSynonymSetRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = SynonymSetServiceClient.get_transport_class(transport_name)( +def test_get_synonym_set_rest(request_type): + client = SynonymSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/synonymSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet( + name="name_value", + context="context_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_synonym_set(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, synonymset.SynonymSet) + assert response.name == "name_value" + assert response.context == "context_value" + + +def test_get_synonym_set_rest_required_fields( + request_type=synonymset_service_request.GetSynonymSetRequest, +): + transport_class = transports.SynonymSetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = SynonymSetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.SynonymSetServiceGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet() + # Mock the http request call within the method and fake a response. 
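+ # Patching requests.Session.request keeps this test hermetic: no real
+ # HTTP call is made, and the canned 200 response below carries the
+ # JSON-encoded SynonymSet the client is expected to deserialize.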
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_synonym_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_synonym_set_rest_unset_required_fields(): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.get_synonym_set._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_synonym_set_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SynonymSetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_synonym_set_rest_interceptors(null_interceptor): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SynonymSetServiceRestInterceptor(), + ) + client = SynonymSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "post_get_synonym_set" + ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "pre_get_synonym_set" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = synonymset_service_request.GetSynonymSetRequest.pb( + synonymset_service_request.GetSynonymSetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = synonymset.SynonymSet.to_json( + synonymset.SynonymSet() ) + request = synonymset_service_request.GetSynonymSetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = synonymset.SynonymSet() -def test_synonym_set_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.SynonymSetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.get_synonym_set( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_synonym_set", - "get_synonym_set", - "update_synonym_set", - "delete_synonym_set", - "list_synonym_sets", + pre.assert_called_once() + post.assert_called_once() + + +def test_get_synonym_set_rest_bad_request( + transport: str = "rest", + request_type=synonymset_service_request.GetSynonymSetRequest, +): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/synonymSets/sample3"} + request = request_type(**request_init) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_synonym_set(request) -def test_synonym_set_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) +def test_get_synonym_set_rest_flattened(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/synonymSets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_synonym_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
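+ # path_template.validate() below confirms the request URI matched the
+ # http rule for this method, i.e. that "name" was expanded into the URL
+ # path rather than sent as a query parameter or in the body.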
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/synonymSets/*}" + % client.transport._host, + args[1], + ) + + +def test_get_synonym_set_rest_flattened_error(transport: str = "rest"): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_synonym_set( + synonymset_service_request.GetSynonymSetRequest(), + name="name_value", + ) + + +def test_get_synonym_set_rest_error(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + synonymset_service_request.UpdateSynonymSetRequest, + dict, + ], +) +def test_update_synonym_set_rest(request_type): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/synonymSets/sample3"} + request_init["synonym_set"] = { + "name": "name_value", + "context": "context_value", + "synonyms": [{"words": ["words_value1", "words_value2"]}], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet( + name="name_value", + context="context_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_synonym_set(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, synonymset.SynonymSet) + assert response.name == "name_value" + assert response.context == "context_value" + + +def test_update_synonym_set_rest_required_fields( + request_type=synonymset_service_request.UpdateSynonymSetRequest, +): + transport_class = transports.SynonymSetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
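+ # Stubbing transcode() with a fixed "v1/sample_method" URI keeps the
+ # test independent of the real http_options for this RPC.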
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_synonym_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_synonym_set_rest_unset_required_fields(): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_synonym_set._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "synonymSet", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_synonym_set_rest_interceptors(null_interceptor): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SynonymSetServiceRestInterceptor(), + ) + client = SynonymSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "post_update_synonym_set" + ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "pre_update_synonym_set" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = synonymset_service_request.UpdateSynonymSetRequest.pb( + synonymset_service_request.UpdateSynonymSetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = synonymset.SynonymSet.to_json( + synonymset.SynonymSet() + ) + + request = synonymset_service_request.UpdateSynonymSetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = synonymset.SynonymSet() + + client.update_synonym_set( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_synonym_set_rest_bad_request( + transport: str = "rest", + request_type=synonymset_service_request.UpdateSynonymSetRequest, +): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/synonymSets/sample3"} + request_init["synonym_set"] = { + "name": "name_value", + "context": "context_value", + "synonyms": [{"words": ["words_value1", "words_value2"]}], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
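+ # An HTTP 400 on the wire must surface to the caller as
+ # google.api_core.exceptions.BadRequest.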
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_synonym_set(request) + + +def test_update_synonym_set_rest_flattened(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset.SynonymSet() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/synonymSets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + synonym_set=synonymset.SynonymSet(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset.SynonymSet.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_synonym_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/synonymSets/*}" + % client.transport._host, + args[1], + ) + + +def test_update_synonym_set_rest_flattened_error(transport: str = "rest"): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_synonym_set( + synonymset_service_request.UpdateSynonymSetRequest(), + name="name_value", + synonym_set=synonymset.SynonymSet(name="name_value"), + ) + + +def test_update_synonym_set_rest_error(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + synonymset_service_request.DeleteSynonymSetRequest, + dict, + ], +) +def test_delete_synonym_set_rest(request_type): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/synonymSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_synonym_set(request) + + # Establish that the response is the type that we expect. 
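+ # DeleteSynonymSet maps to google.protobuf.Empty, which the generated
+ # client surfaces to callers as None.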
+ assert response is None + + +def test_delete_synonym_set_rest_required_fields( + request_type=synonymset_service_request.DeleteSynonymSetRequest, +): + transport_class = transports.SynonymSetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_synonym_set._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
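+ # A DELETE carries no request body, so transcode_result deliberately
+ # omits the "body" key here.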
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_synonym_set(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_synonym_set_rest_unset_required_fields(): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_synonym_set._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_synonym_set_rest_interceptors(null_interceptor): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SynonymSetServiceRestInterceptor(), + ) + client = SynonymSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "pre_delete_synonym_set" + ) as pre: + pre.assert_not_called() + pb_message = synonymset_service_request.DeleteSynonymSetRequest.pb( + synonymset_service_request.DeleteSynonymSetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = synonymset_service_request.DeleteSynonymSetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_synonym_set( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_synonym_set_rest_bad_request( + transport: str = "rest", + request_type=synonymset_service_request.DeleteSynonymSetRequest, +): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/synonymSets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_synonym_set(request) + + +def test_delete_synonym_set_rest_flattened(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
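+ # There is no payload to return for a delete, so the mocked HTTP
+ # response body is left empty and the method itself yields None.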
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/synonymSets/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_synonym_set(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/synonymSets/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_synonym_set_rest_flattened_error(transport: str = "rest"): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_synonym_set( + synonymset_service_request.DeleteSynonymSetRequest(), + name="name_value", + ) + + +def test_delete_synonym_set_rest_error(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + synonymset_service_request.ListSynonymSetsRequest, + dict, + ], +) +def test_list_synonym_sets_rest(request_type): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset_service_request.ListSynonymSetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset_service_request.ListSynonymSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_synonym_sets(request) + + # Establish that the response is the type that we expect. 
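+ # List responses come back wrapped in a pager, which fetches subsequent
+ # pages lazily by re-issuing the request with each next_page_token.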
+ assert isinstance(response, pagers.ListSynonymSetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_synonym_sets_rest_required_fields( + request_type=synonymset_service_request.ListSynonymSetsRequest, +): + transport_class = transports.SynonymSetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_synonym_sets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_synonym_sets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = synonymset_service_request.ListSynonymSetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = synonymset_service_request.ListSynonymSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_synonym_sets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_synonym_sets_rest_unset_required_fields(): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_synonym_sets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_synonym_sets_rest_interceptors(null_interceptor): + transport = transports.SynonymSetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SynonymSetServiceRestInterceptor(), + ) + client = SynonymSetServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "post_list_synonym_sets" + ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, "pre_list_synonym_sets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = synonymset_service_request.ListSynonymSetsRequest.pb( + synonymset_service_request.ListSynonymSetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + synonymset_service_request.ListSynonymSetsResponse.to_json( + synonymset_service_request.ListSynonymSetsResponse() + ) + ) + + request = synonymset_service_request.ListSynonymSetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = synonymset_service_request.ListSynonymSetsResponse() + + client.list_synonym_sets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_synonym_sets_rest_bad_request( + transport: str = "rest", + request_type=synonymset_service_request.ListSynonymSetsRequest, +): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
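+ # As with the other RPCs, an HTTP 400 must map onto BadRequest rather
+ # than leaking a raw requests.Response to the caller.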
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_synonym_sets(request) + + +def test_list_synonym_sets_rest_flattened(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = synonymset_service_request.ListSynonymSetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = synonymset_service_request.ListSynonymSetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_synonym_sets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/synonymSets" + % client.transport._host, + args[1], + ) + + +def test_list_synonym_sets_rest_flattened_error(transport: str = "rest"): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_synonym_sets( + synonymset_service_request.ListSynonymSetsRequest(), + parent="parent_value", + ) + + +def test_list_synonym_sets_rest_pager(transport: str = "rest"): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + synonymset_service_request.ListSynonymSetsResponse( + synonym_sets=[ + synonymset.SynonymSet(), + synonymset.SynonymSet(), + synonymset.SynonymSet(), + ], + next_page_token="abc", + ), + synonymset_service_request.ListSynonymSetsResponse( + synonym_sets=[], + next_page_token="def", + ), + synonymset_service_request.ListSynonymSetsResponse( + synonym_sets=[ + synonymset.SynonymSet(), + ], + next_page_token="ghi", + ), + synonymset_service_request.ListSynonymSetsResponse( + synonym_sets=[ + synonymset.SynonymSet(), + synonymset.SynonymSet(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + synonymset_service_request.ListSynonymSetsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_synonym_sets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, synonymset.SynonymSet) for i in results) + + pages = list(client.list_synonym_sets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SynonymSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SynonymSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SynonymSetServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SynonymSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SynonymSetServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SynonymSetServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SynonymSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SynonymSetServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
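+ # (e.g. one built around a pre-configured channel); the client must then
+ # use exactly that instance rather than constructing its own.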
+ transport = transports.SynonymSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SynonymSetServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SynonymSetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SynonymSetServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SynonymSetServiceGrpcTransport, + transports.SynonymSetServiceGrpcAsyncIOTransport, + transports.SynonymSetServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SynonymSetServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SynonymSetServiceGrpcTransport, + ) + + +def test_synonym_set_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SynonymSetServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_synonym_set_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SynonymSetServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
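+ # (the concrete gRPC and REST transports are expected to override each
+ # of these stubs).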
+ methods = ( + "create_synonym_set", + "get_synonym_set", + "update_synonym_set", + "delete_synonym_set", + "list_synonym_sets", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_synonym_set_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SynonymSetServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2364,6 +3869,7 @@ def test_synonym_set_service_transport_auth_adc(transport_class): [ transports.SynonymSetServiceGrpcTransport, transports.SynonymSetServiceGrpcAsyncIOTransport, + transports.SynonymSetServiceRestTransport, ], ) def test_synonym_set_service_transport_auth_gdch_credentials(transport_class): @@ -2463,11 +3969,23 @@ def test_synonym_set_service_grpc_transport_client_cert_source_for_mtls( ) +def test_synonym_set_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SynonymSetServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_synonym_set_service_host_no_port(transport_name): @@ -2478,7 +3996,11 @@ def test_synonym_set_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:443") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) @pytest.mark.parametrize( @@ -2486,6 +4008,7 @@ def test_synonym_set_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_synonym_set_service_host_with_port(transport_name): @@ -2496,7 +4019,45 @@ def test_synonym_set_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("contentwarehouse.googleapis.com:8000") + assert client.transport._host == ( + "contentwarehouse.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_synonym_set_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SynonymSetServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SynonymSetServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = 
client1.transport.create_synonym_set._session + session2 = client2.transport.create_synonym_set._session + assert session1 != session2 + session1 = client1.transport.get_synonym_set._session + session2 = client2.transport.get_synonym_set._session + assert session1 != session2 + session1 = client1.transport.update_synonym_set._session + session2 = client2.transport.update_synonym_set._session + assert session1 != session2 + session1 = client1.transport.delete_synonym_set._session + session2 = client2.transport.delete_synonym_set._session + assert session1 != session2 + session1 = client1.transport.list_synonym_sets._session + session2 = client2.transport.list_synonym_sets._session + assert session1 != session2 def test_synonym_set_service_grpc_transport_channel(): @@ -2814,8 +4375,212 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
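+ # exactly once, with the request object passed through unchanged.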
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = SynonymSetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SynonymSetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SynonymSetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
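+ # Passing the request as a plain dict below exercises the client's
+ # dict-to-request coercion path.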
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SynonymSetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2833,6 +4598,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-datacatalog-lineage/.coveragerc b/packages/google-cloud-datacatalog-lineage/.coveragerc index da175de4cc02..a83a7401aa0a 100644 --- a/packages/google-cloud-datacatalog-lineage/.coveragerc +++ b/packages/google-cloud-datacatalog-lineage/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/datacatalog/lineage/__init__.py + google/cloud/datacatalog/lineage/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md index ac26bbc9fb7c..4b53daf9ebc9 100644 --- a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md +++ b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.2.1...google-cloud-datacatalog-lineage-v0.2.2) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + ## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.2.0...google-cloud-datacatalog-lineage-v0.2.1) (2023-01-20) diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py index 9af1468d4a23..0d82451c5d9f 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/__init__.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/__init__.py index 65192278e6b7..706772f3571f 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/__init__.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.datacatalog.lineage import gapic_version as package_version +from google.cloud.datacatalog.lineage_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py index 7dec057c0bab..0d82451c5d9f 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py index a818a2b7f160..2adf51276ae8 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import ( @@ -530,7 +530,7 @@ def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> None: + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -538,9 +538,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: operations_pb2.CancelOperationRequest - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -553,7 +551,7 @@ def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> None: + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -561,9 +559,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: operations_pb2.DeleteOperationRequest - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -576,7 +572,7 @@ def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -585,7 +581,7 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.GetOperationRequest + self, response: operations_pb2.Operation ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -599,7 +595,7 @@ 
def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.ListOperationsResponse: + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -608,7 +604,7 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsRequest + self, response: operations_pb2.ListOperationsResponse ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -777,7 +773,7 @@ class _BatchSearchLinkProcesses(LineageRestStub): def __hash__(self): return hash("BatchSearchLinkProcesses") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -802,7 +798,6 @@ def __call__( request (~.lineage.BatchSearchLinkProcessesRequest): The request object. Request message for [BatchSearchLinkProcesses][google.cloud.datacatalog.lineage.v1.Lineage.BatchSearchLinkProcesses]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -879,7 +874,7 @@ class _CreateLineageEvent(LineageRestStub): def __hash__(self): return hash("CreateLineageEvent") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -903,7 +898,6 @@ def __call__( request (~.lineage.CreateLineageEventRequest): The request object. Request message for [CreateLineageEvent][google.cloud.datacatalog.lineage.v1.CreateLineageEvent]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -983,7 +977,7 @@ class _CreateProcess(LineageRestStub): def __hash__(self): return hash("CreateProcess") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1007,7 +1001,6 @@ def __call__( request (~.lineage.CreateProcessRequest): The request object. Request message for [CreateProcess][google.cloud.datacatalog.lineage.v1.CreateProcess]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1082,7 +1075,7 @@ class _CreateRun(LineageRestStub): def __hash__(self): return hash("CreateRun") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1106,7 +1099,6 @@ def __call__( request (~.lineage.CreateRunRequest): The request object. Request message for [CreateRun][google.cloud.datacatalog.lineage.v1.CreateRun]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1182,7 +1174,7 @@ class _DeleteLineageEvent(LineageRestStub): def __hash__(self): return hash("DeleteLineageEvent") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1206,7 +1198,6 @@ def __call__( request (~.lineage.DeleteLineageEventRequest): The request object. 
Request message for [DeleteLineageEvent][google.cloud.datacatalog.lineage.v1.DeleteLineageEvent]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1260,7 +1251,7 @@ class _DeleteProcess(LineageRestStub): def __hash__(self): return hash("DeleteProcess") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1284,7 +1275,6 @@ def __call__( request (~.lineage.DeleteProcessRequest): The request object. Request message for [DeleteProcess][google.cloud.datacatalog.lineage.v1.DeleteProcess]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1349,7 +1339,7 @@ class _DeleteRun(LineageRestStub): def __hash__(self): return hash("DeleteRun") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1373,7 +1363,6 @@ def __call__( request (~.lineage.DeleteRunRequest): The request object. Request message for [DeleteRun][google.cloud.datacatalog.lineage.v1.DeleteRun]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1438,7 +1427,7 @@ class _GetLineageEvent(LineageRestStub): def __hash__(self): return hash("GetLineageEvent") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1462,7 +1451,6 @@ def __call__( request (~.lineage.GetLineageEventRequest): The request object. Request message for [GetLineageEvent][google.cloud.datacatalog.lineage.v1.GetLineageEvent]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1533,7 +1521,7 @@ class _GetProcess(LineageRestStub): def __hash__(self): return hash("GetProcess") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1557,7 +1545,6 @@ def __call__( request (~.lineage.GetProcessRequest): The request object. Request message for [GetProcess][google.cloud.datacatalog.lineage.v1.GetProcess]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1623,7 +1610,7 @@ class _GetRun(LineageRestStub): def __hash__(self): return hash("GetRun") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1647,7 +1634,6 @@ def __call__( request (~.lineage.GetRunRequest): The request object. Request message for [GetRun][google.cloud.datacatalog.lineage.v1.GetRun]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
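The annotation corrections in this file matter because the pre-/post-rpc hooks exist to be overridden. A minimal sketch of a custom interceptor, assuming the generated ``LineageRestInterceptor`` base class that GAPIC REST transports conventionally export from this module; the logging bodies are illustrative only:

    # Sketch: a subclass whose overrides match the corrected signatures.
    from typing import Sequence, Tuple

    from google.longrunning import operations_pb2

    from google.cloud.datacatalog.lineage_v1.services.lineage.transports.rest import (
        LineageRestInterceptor,
    )

    class LoggingLineageInterceptor(LineageRestInterceptor):
        def pre_get_operation(
            self,
            request: operations_pb2.GetOperationRequest,
            metadata: Sequence[Tuple[str, str]],
        ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
            # Pre-rpc hooks receive (request, metadata) and must return the
            # pair, which is what the corrected annotations now express.
            print(f"get_operation: {request.name}")
            return request, metadata

        def post_get_operation(
            self, response: operations_pb2.Operation
        ) -> operations_pb2.Operation:
            # Post-rpc hooks receive the response type (Operation), not the
            # request type, the mix-up these annotation fixes address.
            return response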
@@ -1714,7 +1700,7 @@ class _ListLineageEvents(LineageRestStub): def __hash__(self): return hash("ListLineageEvents") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1738,7 +1724,6 @@ def __call__( request (~.lineage.ListLineageEventsRequest): The request object. Request message for [ListLineageEvents][google.cloud.datacatalog.lineage.v1.ListLineageEvents]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1806,7 +1791,7 @@ class _ListProcesses(LineageRestStub): def __hash__(self): return hash("ListProcesses") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1830,7 +1815,6 @@ def __call__( request (~.lineage.ListProcessesRequest): The request object. Request message for [ListProcesses][google.cloud.datacatalog.lineage.v1.ListProcesses]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1896,7 +1880,7 @@ class _ListRuns(LineageRestStub): def __hash__(self): return hash("ListRuns") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1920,7 +1904,6 @@ def __call__( request (~.lineage.ListRunsRequest): The request object. Request message for [ListRuns][google.cloud.datacatalog.lineage.v1.ListRuns]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1986,7 +1969,7 @@ class _SearchLinks(LineageRestStub): def __hash__(self): return hash("SearchLinks") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2010,7 +1993,6 @@ def __call__( request (~.lineage.SearchLinksRequest): The request object. Request message for [SearchLinks][google.cloud.datacatalog.lineage.v1.Lineage.SearchLinks]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2085,7 +2067,7 @@ class _UpdateProcess(LineageRestStub): def __hash__(self): return hash("UpdateProcess") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2109,7 +2091,6 @@ def __call__( request (~.lineage.UpdateProcessRequest): The request object. Request message for [UpdateProcess][google.cloud.datacatalog.lineage.v1.UpdateProcess]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2184,7 +2165,7 @@ class _UpdateRun(LineageRestStub): def __hash__(self): return hash("UpdateRun") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2208,7 +2189,6 @@ def __call__( request (~.lineage.UpdateRunRequest): The request object. Request message for [UpdateRun][google.cloud.datacatalog.lineage.v1.UpdateRun]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/types/lineage.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/types/lineage.py index 409d441f6c87..e4f9dbf36b2b 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/types/lineage.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/types/lineage.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore @@ -859,8 +861,8 @@ class SearchLinksRequest(proto.Message): Attributes: parent (str): - Required. The project and location you want - search in. + Required. The project and location you want to search, in the + format ``projects/*/locations/*``. source (google.cloud.datacatalog.lineage_v1.types.EntityReference): Optional. Send asset information in the **source** field to retrieve all links that lead from the specified asset to @@ -1003,8 +1005,8 @@ class BatchSearchLinkProcessesRequest(proto.Message): Attributes: parent (str): - Required. The project and location where you - want to search. + Required. The project and location you want to search, in the + format ``projects/*/locations/*``. links (MutableSequence[str]): Required. An array of links to check for their associated LineageProcesses. @@ -1148,11 +1150,9 @@ class Origin(proto.Message): must match the project and location of the lineage resource being created. Examples: - - {source_type: COMPOSER, name: - "projects/foo/locations/us/environments/bar"} - - {source_type: BIGQUERY, name: - "projects/foo/locations/eu"} - - {source_type: CUSTOM, name: "myCustomIntegration"} + - ``{source_type: COMPOSER, name: "projects/foo/locations/us/environments/bar"}`` + - ``{source_type: BIGQUERY, name: "projects/foo/locations/eu"}`` + - ``{source_type: CUSTOM, name: "myCustomIntegration"}`` """ class SourceType(proto.Enum): diff --git a/packages/google-cloud-datacatalog-lineage/noxfile.py b/packages/google-cloud-datacatalog-lineage/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-cloud-datacatalog-lineage/noxfile.py +++ b/packages/google-cloud-datacatalog-lineage/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue.
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json index 72fec78f95f3..6a3fbb91d559 100644 --- a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json +++ b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog-lineage", - "version": "0.2.1" + "version": "0.2.2" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog-lineage/setup.py b/packages/google-cloud-datacatalog-lineage/setup.py index 9261e0104026..f0fed2bb86c5 100644 --- a/packages/google-cloud-datacatalog-lineage/setup.py +++ b/packages/google-cloud-datacatalog-lineage/setup.py @@ -57,9 +57,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud", "google.cloud.datacatalog"] setuptools.setup( name=name, diff --git a/packages/google-cloud-discoveryengine/.coveragerc b/packages/google-cloud-discoveryengine/.coveragerc index 8f5c7cd25552..f2c9bc595be9 100644 --- a/packages/google-cloud-discoveryengine/.coveragerc +++ b/packages/google-cloud-discoveryengine/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/discoveryengine/__init__.py + google/cloud/discoveryengine/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/CHANGELOG.md b/packages/google-cloud-discoveryengine/CHANGELOG.md index 1512347d2716..b99be79e8bdd 100644 --- a/packages/google-cloud-discoveryengine/CHANGELOG.md +++ b/packages/google-cloud-discoveryengine/CHANGELOG.md @@ -1,5 +1,68 @@ # Changelog +## [0.8.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.7.0...google-cloud-discoveryengine-v0.8.0) (2023-05-25) + + +### Features + +* **v1:** add discovery engine v1 client library ([10671ec](https://github.com/googleapis/google-cloud-python/commit/10671ec9c0b16892100f2c91a6138571fabcbeb2)) +* **v1beta:** add LRO API for schema service ([10671ec](https://github.com/googleapis/google-cloud-python/commit/10671ec9c0b16892100f2c91a6138571fabcbeb2)) +* **v1beta:** allow users to specify id field in documents gcs import ([10671ec](https://github.com/googleapis/google-cloud-python/commit/10671ec9c0b16892100f2c91a6138571fabcbeb2)) + + +### Documentation + +* **v1beta:** keep the API doc up-to-date with recent changes ([10671ec](https://github.com/googleapis/google-cloud-python/commit/10671ec9c0b16892100f2c91a6138571fabcbeb2)) + +## [0.7.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.6.0...google-cloud-discoveryengine-v0.7.0) (2023-05-17) + + +### Features + +* allow users to provide additional labels in search ([c6addce](https://github.com/googleapis/google-cloud-python/commit/c6addce5b9281c75db2c7e96d7a57cb202d48006)) +* allow users to provide user info in search ([c6addce](https://github.com/googleapis/google-cloud-python/commit/c6addce5b9281c75db2c7e96d7a57cb202d48006)) +* enable safe search feature for site search 
([c6addce](https://github.com/googleapis/google-cloud-python/commit/c6addce5b9281c75db2c7e96d7a57cb202d48006)) + +## [0.6.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.5.0...google-cloud-discoveryengine-v0.6.0) (2023-05-04) + + +### Features + +* add document purge API ([#11140](https://github.com/googleapis/google-cloud-python/issues/11140)) ([1115310](https://github.com/googleapis/google-cloud-python/commit/1115310f07f767a5c6fac5831e046aa9e879f0ea)) + +## [0.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.4.1...google-cloud-discoveryengine-v0.5.0) (2023-04-21) + + +### Features + +* add search, autocomplete and schema services ([5e1e0af](https://github.com/googleapis/google-cloud-python/commit/5e1e0af0dadd3bd801664c7d315b743a4dbf7b0b)) +* add unstructured document support (PDF/HTML) ([5e1e0af](https://github.com/googleapis/google-cloud-python/commit/5e1e0af0dadd3bd801664c7d315b743a4dbf7b0b)) + + +### Documentation + +* keep the API doc up-to-date with recent changes ([5e1e0af](https://github.com/googleapis/google-cloud-python/commit/5e1e0af0dadd3bd801664c7d315b743a4dbf7b0b)) + +## [0.4.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.4.0...google-cloud-discoveryengine-v0.4.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## [0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.3.1...google-cloud-discoveryengine-v0.4.0) (2023-03-22) + + +### Features + +* add additional resource path with collections ([68d329e](https://github.com/googleapis/google-cloud-python/commit/68d329e886be89629c1961ee87e0b1123f3feda4)) +* document schema id is no longer required ([68d329e](https://github.com/googleapis/google-cloud-python/commit/68d329e886be89629c1961ee87e0b1123f3feda4)) + + +### Documentation + +* keep the API doc up-to-date with recent changes ([68d329e](https://github.com/googleapis/google-cloud-python/commit/68d329e886be89629c1961ee87e0b1123f3feda4)) + ## [0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.3.0...google-cloud-discoveryengine-v0.3.1) (2023-01-20) diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/completion_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/completion_service.rst new file mode 100644 index 000000000000..f18a0cfa68f6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/completion_service.rst @@ -0,0 +1,6 @@ +CompletionService +----------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.completion_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/document_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/document_service.rst new file mode 100644 index 000000000000..a0513d278416 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/document_service.rst @@ -0,0 +1,10 @@ +DocumentService +--------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.document_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.discoveryengine_v1.services.document_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/schema_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/schema_service.rst new file mode 100644 index 000000000000..725b280ca3bf --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/schema_service.rst @@ -0,0 +1,10 @@ +SchemaService +------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.schema_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1.services.schema_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_service.rst new file mode 100644 index 000000000000..54969826c038 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_service.rst @@ -0,0 +1,10 @@ +SearchService +------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.search_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1.services.search_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services.rst new file mode 100644 index 000000000000..5a83cad28f91 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services.rst @@ -0,0 +1,10 @@ +Services for Google Cloud Discoveryengine v1 API +================================================ +.. toctree:: + :maxdepth: 2 + + completion_service + document_service + schema_service + search_service + user_event_service diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/types.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/types.rst new file mode 100644 index 000000000000..07127eae56ef --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Discoveryengine v1 API +============================================= + +.. automodule:: google.cloud.discoveryengine_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/user_event_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/user_event_service.rst new file mode 100644 index 000000000000..74a5dca6bc44 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/user_event_service.rst @@ -0,0 +1,6 @@ +UserEventService +---------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.user_event_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/completion_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/completion_service.rst new file mode 100644 index 000000000000..9e9b191d4bcb --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/completion_service.rst @@ -0,0 +1,6 @@ +CompletionService +----------------------------------- + +.. 
automodule:: google.cloud.discoveryengine_v1beta.services.completion_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/schema_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/schema_service.rst new file mode 100644 index 000000000000..abb2fd929203 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/schema_service.rst @@ -0,0 +1,10 @@ +SchemaService +------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1beta.services.schema_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1beta.services.schema_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/search_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/search_service.rst new file mode 100644 index 000000000000..5d3bbc22240d --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/search_service.rst @@ -0,0 +1,10 @@ +SearchService +------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1beta.services.search_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1beta.services.search_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services.rst index 61e97789ac1d..dc993ef09ce3 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services.rst @@ -3,6 +3,9 @@ Services for Google Cloud Discoveryengine v1beta API .. toctree:: :maxdepth: 2 + completion_service document_service recommendation_service + schema_service + search_service user_event_service diff --git a/packages/google-cloud-discoveryengine/docs/index.rst b/packages/google-cloud-discoveryengine/docs/index.rst index 6c267c5d89e6..9b9609fafd80 100644 --- a/packages/google-cloud-discoveryengine/docs/index.rst +++ b/packages/google-cloud-discoveryengine/docs/index.rst @@ -2,6 +2,17 @@ .. include:: multiprocessing.rst +This package includes clients for multiple versions of Discovery Engine API. +By default, you will get version ``discoveryengine_v1``. + + +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + discoveryengine_v1/services + discoveryengine_v1/types API Reference ------------- diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py index 0e9a9c42d647..77f4af1c2a16 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py @@ -18,6 +18,12 @@ __version__ = package_version.__version__ +from google.cloud.discoveryengine_v1beta.services.completion_service.async_client import ( + CompletionServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.completion_service.client import ( + CompletionServiceClient, +) from google.cloud.discoveryengine_v1beta.services.document_service.async_client import ( DocumentServiceAsyncClient, ) @@ -30,13 +36,33 @@ from google.cloud.discoveryengine_v1beta.services.recommendation_service.client import ( RecommendationServiceClient, ) +from google.cloud.discoveryengine_v1beta.services.schema_service.async_client import ( + SchemaServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.schema_service.client import ( + SchemaServiceClient, +) +from google.cloud.discoveryengine_v1beta.services.search_service.async_client import ( + SearchServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.search_service.client import ( + SearchServiceClient, +) from google.cloud.discoveryengine_v1beta.services.user_event_service.async_client import ( UserEventServiceAsyncClient, ) from google.cloud.discoveryengine_v1beta.services.user_event_service.client import ( UserEventServiceClient, ) -from google.cloud.discoveryengine_v1beta.types.common import CustomAttribute, UserInfo +from google.cloud.discoveryengine_v1beta.types.common import ( + CustomAttribute, + Interval, + UserInfo, +) +from google.cloud.discoveryengine_v1beta.types.completion_service import ( + CompleteQueryRequest, + CompleteQueryResponse, +) from google.cloud.discoveryengine_v1beta.types.document import Document from google.cloud.discoveryengine_v1beta.types.document_service import ( CreateDocumentRequest, @@ -57,10 +83,31 @@ ImportUserEventsRequest, ImportUserEventsResponse, ) +from google.cloud.discoveryengine_v1beta.types.purge_config import ( + PurgeDocumentsMetadata, + PurgeDocumentsRequest, + PurgeDocumentsResponse, +) from google.cloud.discoveryengine_v1beta.types.recommendation_service import ( RecommendRequest, RecommendResponse, ) +from google.cloud.discoveryengine_v1beta.types.schema import Schema +from google.cloud.discoveryengine_v1beta.types.schema_service import ( + CreateSchemaMetadata, + CreateSchemaRequest, + DeleteSchemaMetadata, + DeleteSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + UpdateSchemaMetadata, + UpdateSchemaRequest, +) +from google.cloud.discoveryengine_v1beta.types.search_service import ( + SearchRequest, + SearchResponse, +) from google.cloud.discoveryengine_v1beta.types.user_event import ( CompletionInfo, DocumentInfo, @@ -77,14 +124,23 @@ ) __all__ = ( + "CompletionServiceClient", + "CompletionServiceAsyncClient", "DocumentServiceClient", "DocumentServiceAsyncClient", "RecommendationServiceClient", "RecommendationServiceAsyncClient", + "SchemaServiceClient", + "SchemaServiceAsyncClient", + "SearchServiceClient", + "SearchServiceAsyncClient", "UserEventServiceClient", "UserEventServiceAsyncClient", "CustomAttribute", + 
"Interval", "UserInfo", + "CompleteQueryRequest", + "CompleteQueryResponse", "Document", "CreateDocumentRequest", "DeleteDocumentRequest", @@ -101,8 +157,23 @@ "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", + "PurgeDocumentsMetadata", + "PurgeDocumentsRequest", + "PurgeDocumentsResponse", "RecommendRequest", "RecommendResponse", + "Schema", + "CreateSchemaMetadata", + "CreateSchemaRequest", + "DeleteSchemaMetadata", + "DeleteSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "UpdateSchemaMetadata", + "UpdateSchemaRequest", + "SearchRequest", + "SearchResponse", "CompletionInfo", "DocumentInfo", "MediaInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py index 056efd287f1d..ba128f3c4137 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.3.1" # {x-release-please-version} +__version__ = "0.8.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py new file mode 100644 index 000000000000..3041f609d230 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.completion_service import ( + CompletionServiceAsyncClient, + CompletionServiceClient, +) +from .services.document_service import DocumentServiceAsyncClient, DocumentServiceClient +from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient +from .services.search_service import SearchServiceAsyncClient, SearchServiceClient +from .services.user_event_service import ( + UserEventServiceAsyncClient, + UserEventServiceClient, +) +from .types.common import CustomAttribute, UserInfo +from .types.completion_service import CompleteQueryRequest, CompleteQueryResponse +from .types.document import Document +from .types.document_service import ( + CreateDocumentRequest, + DeleteDocumentRequest, + GetDocumentRequest, + ListDocumentsRequest, + ListDocumentsResponse, + UpdateDocumentRequest, +) +from .types.import_config import ( + BigQuerySource, + GcsSource, + ImportDocumentsMetadata, + ImportDocumentsRequest, + ImportDocumentsResponse, + ImportErrorConfig, + ImportUserEventsMetadata, + ImportUserEventsRequest, + ImportUserEventsResponse, +) +from .types.purge_config import ( + PurgeDocumentsMetadata, + PurgeDocumentsRequest, + PurgeDocumentsResponse, +) +from .types.schema import Schema +from .types.schema_service import ( + CreateSchemaMetadata, + CreateSchemaRequest, + DeleteSchemaMetadata, + DeleteSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + UpdateSchemaMetadata, + UpdateSchemaRequest, +) +from .types.search_service import SearchRequest, SearchResponse +from .types.user_event import ( + CompletionInfo, + DocumentInfo, + MediaInfo, + PageInfo, + PanelInfo, + SearchInfo, + TransactionInfo, + UserEvent, +) +from .types.user_event_service import CollectUserEventRequest, WriteUserEventRequest + +__all__ = ( + "CompletionServiceAsyncClient", + "DocumentServiceAsyncClient", + "SchemaServiceAsyncClient", + "SearchServiceAsyncClient", + "UserEventServiceAsyncClient", + "BigQuerySource", + "CollectUserEventRequest", + "CompleteQueryRequest", + "CompleteQueryResponse", + "CompletionInfo", + "CompletionServiceClient", + "CreateDocumentRequest", + "CreateSchemaMetadata", + "CreateSchemaRequest", + "CustomAttribute", + "DeleteDocumentRequest", + "DeleteSchemaMetadata", + "DeleteSchemaRequest", + "Document", + "DocumentInfo", + "DocumentServiceClient", + "GcsSource", + "GetDocumentRequest", + "GetSchemaRequest", + "ImportDocumentsMetadata", + "ImportDocumentsRequest", + "ImportDocumentsResponse", + "ImportErrorConfig", + "ImportUserEventsMetadata", + "ImportUserEventsRequest", + "ImportUserEventsResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", + "ListSchemasRequest", + "ListSchemasResponse", + "MediaInfo", + "PageInfo", + "PanelInfo", + "PurgeDocumentsMetadata", + "PurgeDocumentsRequest", + "PurgeDocumentsResponse", + "Schema", + "SchemaServiceClient", + "SearchInfo", + "SearchRequest", + "SearchResponse", + "SearchServiceClient", + "TransactionInfo", + "UpdateDocumentRequest", + "UpdateSchemaMetadata", + "UpdateSchemaRequest", + "UserEvent", + "UserEventServiceClient", + "UserInfo", + "WriteUserEventRequest", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json new file mode 100644 index 000000000000..855b383653f0 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json @@ -0,0 +1,359 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.discoveryengine_v1", + "protoPackage": "google.cloud.discoveryengine.v1", + "schema": "1.0", + "services": { + "CompletionService": { + "clients": { + "grpc": { + "libraryClient": "CompletionServiceClient", + "rpcs": { + "CompleteQuery": { + "methods": [ + "complete_query" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CompletionServiceAsyncClient", + "rpcs": { + "CompleteQuery": { + "methods": [ + "complete_query" + ] + } + } + }, + "rest": { + "libraryClient": "CompletionServiceClient", + "rpcs": { + "CompleteQuery": { + "methods": [ + "complete_query" + ] + } + } + } + } + }, + "DocumentService": { + "clients": { + "grpc": { + "libraryClient": "DocumentServiceClient", + "rpcs": { + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "PurgeDocuments": { + "methods": [ + "purge_documents" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DocumentServiceAsyncClient", + "rpcs": { + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "PurgeDocuments": { + "methods": [ + "purge_documents" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + }, + "rest": { + "libraryClient": "DocumentServiceClient", + "rpcs": { + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "PurgeDocuments": { + "methods": [ + "purge_documents" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + } + } + }, + "SchemaService": { + "clients": { + "grpc": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "UpdateSchema": { + "methods": [ + "update_schema" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SchemaServiceAsyncClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "UpdateSchema": { + "methods": [ + "update_schema" + ] + } + } + }, + "rest": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] 
+ }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "UpdateSchema": { + "methods": [ + "update_schema" + ] + } + } + } + } + }, + "SearchService": { + "clients": { + "grpc": { + "libraryClient": "SearchServiceClient", + "rpcs": { + "Search": { + "methods": [ + "search" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SearchServiceAsyncClient", + "rpcs": { + "Search": { + "methods": [ + "search" + ] + } + } + }, + "rest": { + "libraryClient": "SearchServiceClient", + "rpcs": { + "Search": { + "methods": [ + "search" + ] + } + } + } + } + }, + "UserEventService": { + "clients": { + "grpc": { + "libraryClient": "UserEventServiceClient", + "rpcs": { + "CollectUserEvent": { + "methods": [ + "collect_user_event" + ] + }, + "ImportUserEvents": { + "methods": [ + "import_user_events" + ] + }, + "WriteUserEvent": { + "methods": [ + "write_user_event" + ] + } + } + }, + "grpc-async": { + "libraryClient": "UserEventServiceAsyncClient", + "rpcs": { + "CollectUserEvent": { + "methods": [ + "collect_user_event" + ] + }, + "ImportUserEvents": { + "methods": [ + "import_user_events" + ] + }, + "WriteUserEvent": { + "methods": [ + "write_user_event" + ] + } + } + }, + "rest": { + "libraryClient": "UserEventServiceClient", + "rpcs": { + "CollectUserEvent": { + "methods": [ + "collect_user_event" + ] + }, + "ImportUserEvents": { + "methods": [ + "import_user_events" + ] + }, + "WriteUserEvent": { + "methods": [ + "write_user_event" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py new file mode 100644 index 000000000000..b88d7a709a13 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.8.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/py.typed b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/py.typed new file mode 100644 index 000000000000..2ff6437cf473 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-discoveryengine package uses inline types. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/__init__.py new file mode 100644 index 000000000000..417d6a6a941c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CompletionServiceAsyncClient +from .client import CompletionServiceClient + +__all__ = ( + "CompletionServiceClient", + "CompletionServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py new file mode 100644 index 000000000000..4f1715fdf232 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py @@ -0,0 +1,424 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
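The ``gapic_metadata.json`` file added above is machine-readable: it maps each proto service and RPC to the client class and method name for every transport. A small sketch of consuming it, with an illustrative file path:

    # Sketch: resolve which client method implements an RPC per transport.
    import json

    with open("gapic_metadata.json") as f:  # path is illustrative
        meta = json.load(f)

    rpcs = meta["services"]["DocumentService"]["clients"]["rest"]["rpcs"]
    for rpc, entry in rpcs.items():
        # e.g. PurgeDocuments -> ['purge_documents']
        print(rpc, "->", entry["methods"])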
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1.types import completion_service + +from .client import CompletionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport +from .transports.grpc_asyncio import CompletionServiceGrpcAsyncIOTransport + + +class CompletionServiceAsyncClient: + """Service for Auto-Completion.""" + + _client: CompletionServiceClient + + DEFAULT_ENDPOINT = CompletionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CompletionServiceClient.DEFAULT_MTLS_ENDPOINT + + data_store_path = staticmethod(CompletionServiceClient.data_store_path) + parse_data_store_path = staticmethod(CompletionServiceClient.parse_data_store_path) + common_billing_account_path = staticmethod( + CompletionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CompletionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CompletionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + CompletionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + CompletionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CompletionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(CompletionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + CompletionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(CompletionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + CompletionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CompletionServiceAsyncClient: The constructed client. + """ + return CompletionServiceClient.from_service_account_info.__func__(CompletionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CompletionServiceAsyncClient: The constructed client. 
+ """ + return CompletionServiceClient.from_service_account_file.__func__(CompletionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CompletionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CompletionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CompletionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CompletionServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the completion service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CompletionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CompletionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def complete_query( + self, + request: Optional[Union[completion_service.CompleteQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> completion_service.CompleteQueryResponse: + r"""Completes the specified user input with keyword + suggestions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_complete_query(): + # Create a client + client = discoveryengine_v1.CompletionServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CompleteQueryRequest( + data_store="data_store_value", + query="query_value", + ) + + # Make the request + response = await client.complete_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.CompleteQueryRequest, dict]]): + The request object. Request message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.CompleteQueryResponse: + Response message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery] + method. + + """ + # Create or coerce a protobuf request object. + request = completion_service.CompleteQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.complete_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
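To make the endpoint-selection rules documented above concrete: an explicit ``api_endpoint`` in ``ClientOptions`` takes precedence over the ``GOOGLE_API_USE_MTLS_ENDPOINT`` setting, and ``get_mtls_endpoint_and_cert_source`` reports what the client would use. A minimal sketch; the endpoint value is illustrative:

    # Sketch of endpoint resolution for the generated client.
    from google.api_core.client_options import ClientOptions
    from google.cloud import discoveryengine_v1

    options = ClientOptions(api_endpoint="discoveryengine.googleapis.com")
    endpoint, cert_source = (
        discoveryengine_v1.CompletionServiceClient.get_mtls_endpoint_and_cert_source(
            options
        )
    )
    # The explicit endpoint wins; cert_source may be None without mTLS setup.
    print(endpoint, cert_source)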
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CompletionServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py new file mode 100644 index 000000000000..c589da36c457 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py @@ -0,0 +1,661 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1.types import completion_service + +from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport +from .transports.grpc import CompletionServiceGrpcTransport +from .transports.grpc_asyncio import CompletionServiceGrpcAsyncIOTransport +from .transports.rest import CompletionServiceRestTransport + + +class CompletionServiceClientMeta(type): + """Metaclass for the CompletionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[CompletionServiceTransport]] + _transport_registry["grpc"] = CompletionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CompletionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CompletionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CompletionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class CompletionServiceClient(metaclass=CompletionServiceClientMeta):
+    """Service for Auto-Completion."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "discoveryengine.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CompletionServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CompletionServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> CompletionServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CompletionServiceTransport: The transport used by the client
+                instance.
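+
+        For example, a transport can be selected by name and inspected
+        (a sketch; assumes default credentials are available):
+
+        .. code-block:: python
+
+            with CompletionServiceClient(transport="rest") as client:
+                print(client.transport.kind)  # "rest"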
+ """ + return self._transport + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CompletionServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the completion service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CompletionServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
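+                As an illustration of the environment override described in
+                (1) (a sketch; connecting to the mTLS endpoint additionally
+                requires a client certificate to be configured):
+
+                .. code-block:: python
+
+                    import os
+
+                    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
+                    client = CompletionServiceClient()
+                    # The client now targets discoveryengine.mtls.googleapis.com.
+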
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CompletionServiceTransport): + # transport is a CompletionServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def complete_query( + self, + request: Optional[Union[completion_service.CompleteQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> completion_service.CompleteQueryResponse: + r"""Completes the specified user input with keyword + suggestions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_complete_query(): + # Create a client + client = discoveryengine_v1.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CompleteQueryRequest( + data_store="data_store_value", + query="query_value", + ) + + # Make the request + response = client.complete_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CompleteQueryRequest, dict]): + The request object. Request message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.CompleteQueryResponse: + Response message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a completion_service.CompleteQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, completion_service.CompleteQueryRequest): + request = completion_service.CompleteQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.complete_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CompletionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. 
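+
+        For example (a sketch; the resource name is illustrative):
+
+        .. code-block:: python
+
+            from google.longrunning import operations_pb2
+
+            request = operations_pb2.ListOperationsRequest(
+                name="projects/my-project/locations/global",
+            )
+            response = client.list_operations(request=request)
+            for operation in response.operations:
+                print(operation.name)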
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CompletionServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/__init__.py new file mode 100644 index 000000000000..b0fd290e6843 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CompletionServiceTransport +from .grpc import CompletionServiceGrpcTransport +from .grpc_asyncio import CompletionServiceGrpcAsyncIOTransport +from .rest import CompletionServiceRestInterceptor, CompletionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CompletionServiceTransport]] +_transport_registry["grpc"] = CompletionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = CompletionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = CompletionServiceRestTransport + +__all__ = ( + "CompletionServiceTransport", + "CompletionServiceGrpcTransport", + "CompletionServiceGrpcAsyncIOTransport", + "CompletionServiceRestTransport", + "CompletionServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py new file mode 100644 index 000000000000..67e1d19b207c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import completion_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CompletionServiceTransport(abc.ABC): + """Abstract transport class for CompletionService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
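+        # Self-signed JWTs embed the intended audience (the API host) in the
+        # token itself and skip the OAuth 2.0 token-exchange round trip,
+        # which is why they are preferred when the credentials support them.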
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.complete_query: gapic_v1.method.wrap_method( + self.complete_query, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def complete_query( + self, + ) -> Callable[ + [completion_service.CompleteQueryRequest], + Union[ + completion_service.CompleteQueryResponse, + Awaitable[completion_service.CompleteQueryResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CompletionServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py new file mode 100644 index 000000000000..2345064fed85 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import completion_service + +from .base import DEFAULT_CLIENT_INFO, CompletionServiceTransport + + +class CompletionServiceGrpcTransport(CompletionServiceTransport): + """gRPC backend transport for CompletionService. + + Service for Auto-Completion. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
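+
+        A transport is normally constructed for you by the client, but it
+        can also be built directly (a sketch; assumes application default
+        credentials are available):
+
+        .. code-block:: python
+
+            transport = CompletionServiceGrpcTransport()
+            client = CompletionServiceClient(transport=transport)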
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def complete_query( + self, + ) -> Callable[ + [completion_service.CompleteQueryRequest], + completion_service.CompleteQueryResponse, + ]: + r"""Return a callable for the complete query method over gRPC. + + Completes the specified user input with keyword + suggestions. + + Returns: + Callable[[~.CompleteQueryRequest], + ~.CompleteQueryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_query" not in self._stubs: + self._stubs["complete_query"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.CompletionService/CompleteQuery", + request_serializer=completion_service.CompleteQueryRequest.serialize, + response_deserializer=completion_service.CompleteQueryResponse.deserialize, + ) + return self._stubs["complete_query"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CompletionServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e266f35f059d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import completion_service + +from .base import DEFAULT_CLIENT_INFO, CompletionServiceTransport +from .grpc import CompletionServiceGrpcTransport + + +class CompletionServiceGrpcAsyncIOTransport(CompletionServiceTransport): + """gRPC AsyncIO backend transport for CompletionService. + + Service for Auto-Completion. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def complete_query( + self, + ) -> Callable[ + [completion_service.CompleteQueryRequest], + Awaitable[completion_service.CompleteQueryResponse], + ]: + r"""Return a callable for the complete query method over gRPC. + + Completes the specified user input with keyword + suggestions. + + Returns: + Callable[[~.CompleteQueryRequest], + Awaitable[~.CompleteQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "complete_query" not in self._stubs: + self._stubs["complete_query"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.CompletionService/CompleteQuery", + request_serializer=completion_service.CompleteQueryRequest.serialize, + response_deserializer=completion_service.CompleteQueryResponse.deserialize, + ) + return self._stubs["complete_query"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("CompletionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py new file mode 100644 index 000000000000..a49e2d4a572a --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py @@ -0,0 +1,577 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.discoveryengine_v1.types import completion_service + +from .base import CompletionServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CompletionServiceRestInterceptor: + """Interceptor for CompletionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CompletionServiceRestTransport. + + .. code-block:: python + class MyCustomCompletionServiceInterceptor(CompletionServiceRestInterceptor): + def pre_complete_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_complete_query(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CompletionServiceRestTransport(interceptor=MyCustomCompletionServiceInterceptor()) + client = CompletionServiceClient(transport=transport) + + + """ + + def pre_complete_query( + self, + request: completion_service.CompleteQueryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[completion_service.CompleteQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for complete_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_complete_query( + self, response: completion_service.CompleteQueryResponse + ) -> completion_service.CompleteQueryResponse: + """Post-rpc interceptor for complete_query + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CompletionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CompletionServiceRestInterceptor + + +class CompletionServiceRestTransport(CompletionServiceTransport): + """REST backend transport for CompletionService. + + Service for Auto-Completion. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CompletionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CompletionServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CompleteQuery(CompletionServiceRestStub):
+        def __hash__(self):
+            return hash("CompleteQuery")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "query": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: completion_service.CompleteQueryRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> completion_service.CompleteQueryResponse:
+            r"""Call the complete query method over HTTP.
+
+            Args:
+                request (~.completion_service.CompleteQueryRequest):
+                    The request object. Request message for
+                    [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery]
+                    method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.completion_service.CompleteQueryResponse:
+                    Response message for
+                    [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery]
+                    method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{data_store=projects/*/locations/*/dataStores/*}:completeQuery", + }, + { + "method": "get", + "uri": "/v1/{data_store=projects/*/locations/*/collections/*/dataStores/*}:completeQuery", + }, + ] + request, metadata = self._interceptor.pre_complete_query(request, metadata) + pb_request = completion_service.CompleteQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = completion_service.CompleteQueryResponse() + pb_resp = completion_service.CompleteQueryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_complete_query(resp) + return resp + + @property + def complete_query( + self, + ) -> Callable[ + [completion_service.CompleteQueryRequest], + completion_service.CompleteQueryResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CompleteQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(CompletionServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(CompletionServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CompletionServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/__init__.py new file mode 100644 index 000000000000..e3faf2082d0e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import DocumentServiceAsyncClient +from .client import DocumentServiceClient + +__all__ = ( + "DocumentServiceClient", + "DocumentServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py new file mode 100644 index 000000000000..344527db15cf --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py @@ -0,0 +1,1153 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.document_service import pagers +from google.cloud.discoveryengine_v1.types import ( + document_service, + import_config, + purge_config, +) +from google.cloud.discoveryengine_v1.types import document +from google.cloud.discoveryengine_v1.types import document as gcd_document + +from .client import DocumentServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DocumentServiceTransport +from .transports.grpc_asyncio import DocumentServiceGrpcAsyncIOTransport + + +class DocumentServiceAsyncClient: + """Service for ingesting + [Document][google.cloud.discoveryengine.v1.Document] information of + the customer's website. 
+ """ + + _client: DocumentServiceClient + + DEFAULT_ENDPOINT = DocumentServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DocumentServiceClient.DEFAULT_MTLS_ENDPOINT + + branch_path = staticmethod(DocumentServiceClient.branch_path) + parse_branch_path = staticmethod(DocumentServiceClient.parse_branch_path) + document_path = staticmethod(DocumentServiceClient.document_path) + parse_document_path = staticmethod(DocumentServiceClient.parse_document_path) + common_billing_account_path = staticmethod( + DocumentServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DocumentServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DocumentServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DocumentServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DocumentServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DocumentServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DocumentServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DocumentServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DocumentServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DocumentServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DocumentServiceAsyncClient: The constructed client. + """ + return DocumentServiceClient.from_service_account_info.__func__(DocumentServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DocumentServiceAsyncClient: The constructed client. + """ + return DocumentServiceClient.from_service_account_file.__func__(DocumentServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DocumentServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DocumentServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DocumentServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(DocumentServiceClient).get_transport_class, type(DocumentServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, DocumentServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the document service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.DocumentServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+ """ + self._client = DocumentServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_document( + self, + request: Optional[Union[document_service.GetDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Gets a [Document][google.cloud.discoveryengine.v1.Document]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_get_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetDocumentRequest, dict]]): + The request object. Request message for + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + method. + name (:class:`str`): + Required. Full resource name of + [Document][google.cloud.discoveryengine.v1.Document], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + + If the caller does not have permission to access the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + If the requested + [Document][google.cloud.discoveryengine.v1.Document] + does not exist, a ``NOT_FOUND`` error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Document: + Document captures all raw metadata + information of items to be recommended + or searched. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = document_service.GetDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_document, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_documents( + self, + request: Optional[Union[document_service.ListDocumentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsAsyncPager: + r"""Gets a list of + [Document][google.cloud.discoveryengine.v1.Document]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListDocumentsRequest, dict]]): + The request object. Request message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + Use ``default_branch`` as the branch ID, to list + documents under the default branch. + + If the caller does not have permission to list + [Documents][]s under this branch, regardless of whether + or not this branch exists, a ``PERMISSION_DENIED`` error + is returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsAsyncPager: + Response message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
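+        # The flattened ``parent`` argument and a full ``request`` object are
+        # mutually exclusive ways of specifying this call, so supplying both
+        # is ambiguous and rejected below.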
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = document_service.ListDocumentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_documents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDocumentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_document( + self, + request: Optional[Union[document_service.CreateDocumentRequest, dict]] = None, + *, + parent: Optional[str] = None, + document: Optional[gcd_document.Document] = None, + document_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_document.Document: + r"""Creates a [Document][google.cloud.discoveryengine.v1.Document]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_create_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateDocumentRequest( + parent="parent_value", + document_id="document_id_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.CreateDocumentRequest, dict]]): + The request object. Request message for + [DocumentService.CreateDocument][google.cloud.discoveryengine.v1.DocumentService.CreateDocument] + method. + parent (:class:`str`): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + document (:class:`google.cloud.discoveryengine_v1.types.Document`): + Required. The + [Document][google.cloud.discoveryengine.v1.Document] to + create. + + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + document_id (:class:`str`): + Required. 
The ID to use for the
+                [Document][google.cloud.discoveryengine.v1.Document],
+                which will become the final component of the
+                [Document.name][google.cloud.discoveryengine.v1.Document.name].
+
+                If the caller does not have permission to create the
+                [Document][google.cloud.discoveryengine.v1.Document],
+                regardless of whether or not it exists, a
+                ``PERMISSION_DENIED`` error is returned.
+
+                This field must be unique among all
+                [Document][google.cloud.discoveryengine.v1.Document]s
+                with the same
+                [parent][google.cloud.discoveryengine.v1.CreateDocumentRequest.parent].
+                Otherwise, an ``ALREADY_EXISTS`` error is returned.
+
+                This field must conform to
+                `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__
+                standard with a length limit of 63 characters.
+                Otherwise, an ``INVALID_ARGUMENT`` error is returned.
+
+                This corresponds to the ``document_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.discoveryengine_v1.types.Document:
+                Document captures all raw metadata
+                information of items to be recommended
+                or searched.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document, document_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = document_service.CreateDocumentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if document is not None:
+            request.document = document
+        if document_id is not None:
+            request.document_id = document_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_document,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_document(
+        self,
+        request: Optional[Union[document_service.UpdateDocumentRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> document.Document:
+        r"""Updates a [Document][google.cloud.discoveryengine.v1.Document].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_update_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.UpdateDocumentRequest, dict]]): + The request object. Request message for + [DocumentService.UpdateDocument][google.cloud.discoveryengine.v1.DocumentService.UpdateDocument] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Document: + Document captures all raw metadata + information of items to be recommended + or searched. + + """ + # Create or coerce a protobuf request object. + request = document_service.UpdateDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_document, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("document.name", request.document.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_document( + self, + request: Optional[Union[document_service.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a [Document][google.cloud.discoveryengine.v1.Document]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_delete_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.DeleteDocumentRequest, dict]]): + The request object. Request message for + [DocumentService.DeleteDocument][google.cloud.discoveryengine.v1.DocumentService.DeleteDocument] + method. + name (:class:`str`): + Required. 
Full resource name of + [Document][google.cloud.discoveryengine.v1.Document], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + + If the caller does not have permission to delete the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + If the + [Document][google.cloud.discoveryengine.v1.Document] to + delete does not exist, a ``NOT_FOUND`` error is + returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = document_service.DeleteDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_document, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def import_documents( + self, + request: Optional[Union[import_config.ImportDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Bulk import of multiple + [Document][google.cloud.discoveryengine.v1.Document]s. Request + processing may be synchronous. Non-existing items will be + created. + + Note: It is possible for a subset of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + successfully updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_import_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ImportDocumentsRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ImportDocumentsRequest, dict]]): + The request object. Request message for Import methods. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.ImportDocumentsResponse` Response of the + [ImportDocumentsRequest][google.cloud.discoveryengine.v1.ImportDocumentsRequest]. + If the long running operation is done, then this + message is returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + request = import_config.ImportDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_documents, + default_retry=retries.Retry( + initial=1.0, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + import_config.ImportDocumentsResponse, + metadata_type=import_config.ImportDocumentsMetadata, + ) + + # Done; return the response. + return response + + async def purge_documents( + self, + request: Optional[Union[purge_config.PurgeDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1.Document]s in a + branch. + + This process is asynchronous. Depending on the number of + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, this operation can take hours to complete. 
Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1.PurgeDocumentsRequest.force] + to false. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_purge_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest, dict]]): + The request object. Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeDocumentsResponse` Response message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. If the long running operation is successfully + done, then this message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + request = purge_config.PurgeDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_documents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeDocumentsResponse, + metadata_type=purge_config.PurgeDocumentsMetadata, + ) + + # Done; return the response. 
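+        # The AsyncOperation resolves to a PurgeDocumentsResponse once the
+        # server-side purge finishes; see the docstring's sample above for
+        # the await pattern.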
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
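+        # ``response`` is a raw operations_pb2.Operation rather than a
+        # proto-plus type, since the Operations mixin works directly with
+        # protobuf messages.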
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DocumentServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py new file mode 100644 index 000000000000..78ce54436158 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py @@ -0,0 +1,1409 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.document_service import pagers +from google.cloud.discoveryengine_v1.types import ( + document_service, + import_config, + purge_config, +) +from google.cloud.discoveryengine_v1.types import document +from google.cloud.discoveryengine_v1.types import document as gcd_document + +from .transports.base import DEFAULT_CLIENT_INFO, DocumentServiceTransport +from .transports.grpc import DocumentServiceGrpcTransport +from .transports.grpc_asyncio import DocumentServiceGrpcAsyncIOTransport +from .transports.rest import DocumentServiceRestTransport + + +class DocumentServiceClientMeta(type): + """Metaclass for the DocumentService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
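+
+    An illustrative sketch (the transport labels come from the registry
+    defined just below; not part of the generated surface)::
+
+        transport_cls = DocumentServiceClient.get_transport_class("grpc")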
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DocumentServiceTransport]] + _transport_registry["grpc"] = DocumentServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DocumentServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DocumentServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DocumentServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DocumentServiceClient(metaclass=DocumentServiceClientMeta): + """Service for ingesting + [Document][google.cloud.discoveryengine.v1.Document] information of + the customer's website. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DocumentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DocumentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DocumentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DocumentServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def branch_path( + project: str, + location: str, + data_store: str, + branch: str, + ) -> str: + """Returns a fully-qualified branch string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}".format( + project=project, + location=location, + data_store=data_store, + branch=branch, + ) + + @staticmethod + def parse_branch_path(path: str) -> Dict[str, str]: + """Parses a branch path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/branches/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def document_path( + project: str, + location: str, + data_store: str, + branch: str, + document: str, + ) -> str: + """Returns a fully-qualified document string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format( + project=project, + location=location, + data_store=data_store, + branch=branch, + document=document, + ) + + @staticmethod + def parse_document_path(path: str) -> Dict[str, str]: + """Parses a document path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/branches/(?P.+?)/documents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return 
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, DocumentServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the document service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, DocumentServiceTransport]): The
+                transport to use.
If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DocumentServiceTransport): + # transport is a DocumentServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_document( + self, + request: Optional[Union[document_service.GetDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Gets a [Document][google.cloud.discoveryengine.v1.Document]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.GetDocumentRequest, dict]): + The request object. Request message for + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + method. + name (str): + Required. Full resource name of + [Document][google.cloud.discoveryengine.v1.Document], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + + If the caller does not have permission to access the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + If the requested + [Document][google.cloud.discoveryengine.v1.Document] + does not exist, a ``NOT_FOUND`` error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Document: + Document captures all raw metadata + information of items to be recommended + or searched. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
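+        # For example (illustrative): get_document(name="projects/...") uses
+        # the flattened argument, while get_document(request=request_obj)
+        # passes a full request object; supplying both raises the ValueError
+        # below.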
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a document_service.GetDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, document_service.GetDocumentRequest): + request = document_service.GetDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_documents( + self, + request: Optional[Union[document_service.ListDocumentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsPager: + r"""Gets a list of + [Document][google.cloud.discoveryengine.v1.Document]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListDocumentsRequest, dict]): + The request object. Request message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + Use ``default_branch`` as the branch ID, to list + documents under the default branch. + + If the caller does not have permission to list + [Documents][]s under this branch, regardless of whether + or not this branch exists, a ``PERMISSION_DENIED`` error + is returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsPager: + Response message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a document_service.ListDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, document_service.ListDocumentsRequest): + request = document_service.ListDocumentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDocumentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_document( + self, + request: Optional[Union[document_service.CreateDocumentRequest, dict]] = None, + *, + parent: Optional[str] = None, + document: Optional[gcd_document.Document] = None, + document_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_document.Document: + r"""Creates a [Document][google.cloud.discoveryengine.v1.Document]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_create_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateDocumentRequest( + parent="parent_value", + document_id="document_id_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CreateDocumentRequest, dict]): + The request object. 
Request message for
+                [DocumentService.CreateDocument][google.cloud.discoveryengine.v1.DocumentService.CreateDocument]
+                method.
+            parent (str):
+                Required. The parent resource name, such as
+                ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document (google.cloud.discoveryengine_v1.types.Document):
+                Required. The
+                [Document][google.cloud.discoveryengine.v1.Document] to
+                create.
+
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document_id (str):
+                Required. The ID to use for the
+                [Document][google.cloud.discoveryengine.v1.Document],
+                which will become the final component of the
+                [Document.name][google.cloud.discoveryengine.v1.Document.name].
+
+                If the caller does not have permission to create the
+                [Document][google.cloud.discoveryengine.v1.Document],
+                regardless of whether or not it exists, a
+                ``PERMISSION_DENIED`` error is returned.
+
+                This field must be unique among all
+                [Document][google.cloud.discoveryengine.v1.Document]s
+                with the same
+                [parent][google.cloud.discoveryengine.v1.CreateDocumentRequest.parent].
+                Otherwise, an ``ALREADY_EXISTS`` error is returned.
+
+                This field must conform to
+                `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__
+                standard with a length limit of 63 characters.
+                Otherwise, an ``INVALID_ARGUMENT`` error is returned.
+
+                This corresponds to the ``document_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.discoveryengine_v1.types.Document:
+                Document captures all raw metadata
+                information of items to be recommended
+                or searched.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document, document_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a document_service.CreateDocumentRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, document_service.CreateDocumentRequest):
+            request = document_service.CreateDocumentRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if document is not None:
+            request.document = document
+        if document_id is not None:
+            request.document_id = document_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
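+        # Note: at this point `metadata` also carries the
+        # x-goog-request-params routing header built from request.parent by
+        # to_grpc_metadata above (an observation, not new behavior).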
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_document( + self, + request: Optional[Union[document_service.UpdateDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Updates a [Document][google.cloud.discoveryengine.v1.Document]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_update_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.UpdateDocumentRequest, dict]): + The request object. Request message for + [DocumentService.UpdateDocument][google.cloud.discoveryengine.v1.DocumentService.UpdateDocument] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Document: + Document captures all raw metadata + information of items to be recommended + or searched. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a document_service.UpdateDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, document_service.UpdateDocumentRequest): + request = document_service.UpdateDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("document.name", request.document.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_document( + self, + request: Optional[Union[document_service.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a [Document][google.cloud.discoveryengine.v1.Document]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_delete_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.DeleteDocumentRequest, dict]): + The request object. Request message for + [DocumentService.DeleteDocument][google.cloud.discoveryengine.v1.DocumentService.DeleteDocument] + method. + name (str): + Required. Full resource name of + [Document][google.cloud.discoveryengine.v1.Document], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + + If the caller does not have permission to delete the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + If the + [Document][google.cloud.discoveryengine.v1.Document] to + delete does not exist, a ``NOT_FOUND`` error is + returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a document_service.DeleteDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, document_service.DeleteDocumentRequest): + request = document_service.DeleteDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def import_documents( + self, + request: Optional[Union[import_config.ImportDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Bulk import of multiple + [Document][google.cloud.discoveryengine.v1.Document]s. Request + processing may be synchronous. Non-existing items will be + created. 
+ + Note: It is possible for a subset of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + successfully updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_import_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ImportDocumentsRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ImportDocumentsRequest, dict]): + The request object. Request message for Import methods. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.ImportDocumentsResponse` Response of the + [ImportDocumentsRequest][google.cloud.discoveryengine.v1.ImportDocumentsRequest]. + If the long running operation is done, then this + message is returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a import_config.ImportDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, import_config.ImportDocumentsRequest): + request = import_config.ImportDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + import_config.ImportDocumentsResponse, + metadata_type=import_config.ImportDocumentsMetadata, + ) + + # Done; return the response. + return response + + def purge_documents( + self, + request: Optional[Union[purge_config.PurgeDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1.Document]s in a + branch. + + This process is asynchronous. 
Depending on the number of + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, this operation can take hours to complete. Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1.PurgeDocumentsRequest.force] + to false. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_purge_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest, dict]): + The request object. Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeDocumentsResponse` Response message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. If the long running operation is successfully + done, then this message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a purge_config.PurgeDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, purge_config.PurgeDocumentsRequest): + request = purge_config.PurgeDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
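+        # The wrapped future resolves to a PurgeDocumentsResponse; e.g.
+        # (illustrative) response.result(timeout=7200) blocks until the
+        # purge finishes or the given deadline elapses.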
+ response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeDocumentsResponse, + metadata_type=purge_config.PurgeDocumentsMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DocumentServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
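+        # Unlike the generated document RPCs above, this operations mixin is
+        # wrapped ad hoc here rather than taken from
+        # self._transport._wrapped_methods.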
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DocumentServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/pagers.py new file mode 100644 index 000000000000..7b8f5245a50b --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1.types import document, document_service + + +class ListDocumentsPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListDocumentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., document_service.ListDocumentsResponse], + request: document_service.ListDocumentsRequest, + response: document_service.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListDocumentsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
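+
+            A typical flow (illustrative; names assumed): iterate the pager
+            returned by ``DocumentServiceClient.list_documents``::
+
+                for doc in client.list_documents(request=request):
+                    print(doc.name)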
+ """ + self._method = method + self._request = document_service.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[document_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[document.Document]: + for page in self.pages: + yield from page.documents + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDocumentsAsyncPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListDocumentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[document_service.ListDocumentsResponse]], + request: document_service.ListDocumentsRequest, + response: document_service.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListDocumentsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = document_service.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[document_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[document.Document]: + async def async_generator(): + async for page in self.pages: + for response in page.documents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/__init__.py new file mode 100644 index 000000000000..98aadd5d9ab4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DocumentServiceTransport +from .grpc import DocumentServiceGrpcTransport +from .grpc_asyncio import DocumentServiceGrpcAsyncIOTransport +from .rest import DocumentServiceRestInterceptor, DocumentServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DocumentServiceTransport]] +_transport_registry["grpc"] = DocumentServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DocumentServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DocumentServiceRestTransport + +__all__ = ( + "DocumentServiceTransport", + "DocumentServiceGrpcTransport", + "DocumentServiceGrpcAsyncIOTransport", + "DocumentServiceRestTransport", + "DocumentServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py new file mode 100644 index 000000000000..ea235ca904c9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import ( + document_service, + import_config, + purge_config, +) +from google.cloud.discoveryengine_v1.types import document +from google.cloud.discoveryengine_v1.types import document as gcd_document + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DocumentServiceTransport(abc.ABC): + """Abstract transport class for DocumentService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
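+        # Passing both explicit credentials and a credentials file is
+        # ambiguous, so that combination is rejected outright below.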
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.get_document: gapic_v1.method.wrap_method(
+                self.get_document,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list_documents: gapic_v1.method.wrap_method(
+                self.list_documents,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.create_document: gapic_v1.method.wrap_method(
+                self.create_document,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.update_document: gapic_v1.method.wrap_method(
+                self.update_document,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.delete_document: gapic_v1.method.wrap_method(
+                self.delete_document,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.import_documents: gapic_v1.method.wrap_method(
+                self.import_documents,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=30.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.purge_documents: gapic_v1.method.wrap_method(
+                self.purge_documents,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+             Only call this method if the transport is NOT shared
+             with other clients - this may cause errors in other clients!
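+
+        In typical use you do not call this directly: the client object is a
+        context manager whose ``__exit__`` closes the transport (a sketch; the
+        resource name is hypothetical):
+
+        .. code-block:: python
+
+            from google.cloud import discoveryengine_v1
+
+            with discoveryengine_v1.DocumentServiceClient() as client:
+                client.get_document(
+                    name="projects/p/locations/global/dataStores/d"
+                    "/branches/0/documents/doc-1"
+                )
+            # Leaving the block calls transport.close() exactly once.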
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_document( + self, + ) -> Callable[ + [document_service.GetDocumentRequest], + Union[document.Document, Awaitable[document.Document]], + ]: + raise NotImplementedError() + + @property + def list_documents( + self, + ) -> Callable[ + [document_service.ListDocumentsRequest], + Union[ + document_service.ListDocumentsResponse, + Awaitable[document_service.ListDocumentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_document( + self, + ) -> Callable[ + [document_service.CreateDocumentRequest], + Union[gcd_document.Document, Awaitable[gcd_document.Document]], + ]: + raise NotImplementedError() + + @property + def update_document( + self, + ) -> Callable[ + [document_service.UpdateDocumentRequest], + Union[document.Document, Awaitable[document.Document]], + ]: + raise NotImplementedError() + + @property + def delete_document( + self, + ) -> Callable[ + [document_service.DeleteDocumentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def import_documents( + self, + ) -> Callable[ + [import_config.ImportDocumentsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def purge_documents( + self, + ) -> Callable[ + [purge_config.PurgeDocumentsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DocumentServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py new file mode 100644 index 000000000000..a4c297347361 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + document_service, + import_config, + purge_config, +) +from google.cloud.discoveryengine_v1.types import document +from google.cloud.discoveryengine_v1.types import document as gcd_document + +from .base import DEFAULT_CLIENT_INFO, DocumentServiceTransport + + +class DocumentServiceGrpcTransport(DocumentServiceTransport): + """gRPC backend transport for DocumentService. + + Service for ingesting + [Document][google.cloud.discoveryengine.v1.Document] information of + the customer's website. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. 
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def get_document(
+        self,
+    ) -> Callable[[document_service.GetDocumentRequest], document.Document]:
+        r"""Return a callable for the get document method over gRPC.
+
+        Gets a [Document][google.cloud.discoveryengine.v1.Document].
+
+        Returns:
+            Callable[[~.GetDocumentRequest],
+                    ~.Document]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_document" not in self._stubs:
+            self._stubs["get_document"] = self.grpc_channel.unary_unary(
+                "/google.cloud.discoveryengine.v1.DocumentService/GetDocument",
+                request_serializer=document_service.GetDocumentRequest.serialize,
+                response_deserializer=document.Document.deserialize,
+            )
+        return self._stubs["get_document"]
+
+    @property
+    def list_documents(
+        self,
+    ) -> Callable[
+        [document_service.ListDocumentsRequest], document_service.ListDocumentsResponse
+    ]:
+        r"""Return a callable for the list documents method over gRPC.
+
+        Gets a list of
+        [Document][google.cloud.discoveryengine.v1.Document]s.
+
+        Returns:
+            Callable[[~.ListDocumentsRequest],
+                    ~.ListDocumentsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
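+        # The stub is built once on first property access and cached in
+        # self._stubs, so later lookups reuse the same callable.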
+ if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/ListDocuments", + request_serializer=document_service.ListDocumentsRequest.serialize, + response_deserializer=document_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + + @property + def create_document( + self, + ) -> Callable[[document_service.CreateDocumentRequest], gcd_document.Document]: + r"""Return a callable for the create document method over gRPC. + + Creates a [Document][google.cloud.discoveryengine.v1.Document]. + + Returns: + Callable[[~.CreateDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_document" not in self._stubs: + self._stubs["create_document"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/CreateDocument", + request_serializer=document_service.CreateDocumentRequest.serialize, + response_deserializer=gcd_document.Document.deserialize, + ) + return self._stubs["create_document"] + + @property + def update_document( + self, + ) -> Callable[[document_service.UpdateDocumentRequest], document.Document]: + r"""Return a callable for the update document method over gRPC. + + Updates a [Document][google.cloud.discoveryengine.v1.Document]. + + Returns: + Callable[[~.UpdateDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_document" not in self._stubs: + self._stubs["update_document"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/UpdateDocument", + request_serializer=document_service.UpdateDocumentRequest.serialize, + response_deserializer=document.Document.deserialize, + ) + return self._stubs["update_document"] + + @property + def delete_document( + self, + ) -> Callable[[document_service.DeleteDocumentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete document method over gRPC. + + Deletes a [Document][google.cloud.discoveryengine.v1.Document]. + + Returns: + Callable[[~.DeleteDocumentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_document" not in self._stubs: + self._stubs["delete_document"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/DeleteDocument", + request_serializer=document_service.DeleteDocumentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_document"] + + @property + def import_documents( + self, + ) -> Callable[[import_config.ImportDocumentsRequest], operations_pb2.Operation]: + r"""Return a callable for the import documents method over gRPC. + + Bulk import of multiple + [Document][google.cloud.discoveryengine.v1.Document]s. Request + processing may be synchronous. 
Non-existing items will be + created. + + Note: It is possible for a subset of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + successfully updated. + + Returns: + Callable[[~.ImportDocumentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_documents" not in self._stubs: + self._stubs["import_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/ImportDocuments", + request_serializer=import_config.ImportDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_documents"] + + @property + def purge_documents( + self, + ) -> Callable[[purge_config.PurgeDocumentsRequest], operations_pb2.Operation]: + r"""Return a callable for the purge documents method over gRPC. + + Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1.Document]s in a + branch. + + This process is asynchronous. Depending on the number of + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, this operation can take hours to complete. Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1.PurgeDocumentsRequest.force] + to false. + + Returns: + Callable[[~.PurgeDocumentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_documents" not in self._stubs: + self._stubs["purge_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/PurgeDocuments", + request_serializer=purge_config.PurgeDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_documents"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
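+        # These generic google.longrunning stubs back the client's
+        # ``get_operation``/``list_operations`` mixin methods, which can be
+        # used to poll the operations returned by import_documents and
+        # purge_documents.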
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DocumentServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..983a6725b831 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py @@ -0,0 +1,520 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + document_service, + import_config, + purge_config, +) +from google.cloud.discoveryengine_v1.types import document +from google.cloud.discoveryengine_v1.types import document as gcd_document + +from .base import DEFAULT_CLIENT_INFO, DocumentServiceTransport +from .grpc import DocumentServiceGrpcTransport + + +class DocumentServiceGrpcAsyncIOTransport(DocumentServiceTransport): + """gRPC AsyncIO backend transport for DocumentService. + + Service for ingesting + [Document][google.cloud.discoveryengine.v1.Document] information of + the customer's website. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
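+                # An explicit client_cert_source callback takes precedence;
+                # otherwise fall back to application default SSL credentials.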
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_document( + self, + ) -> Callable[[document_service.GetDocumentRequest], Awaitable[document.Document]]: + r"""Return a callable for the get document method over gRPC. + + Gets a [Document][google.cloud.discoveryengine.v1.Document]. + + Returns: + Callable[[~.GetDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_document" not in self._stubs: + self._stubs["get_document"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/GetDocument", + request_serializer=document_service.GetDocumentRequest.serialize, + response_deserializer=document.Document.deserialize, + ) + return self._stubs["get_document"] + + @property + def list_documents( + self, + ) -> Callable[ + [document_service.ListDocumentsRequest], + Awaitable[document_service.ListDocumentsResponse], + ]: + r"""Return a callable for the list documents method over gRPC. + + Gets a list of + [Document][google.cloud.discoveryengine.v1.Document]s. 
+ + Returns: + Callable[[~.ListDocumentsRequest], + Awaitable[~.ListDocumentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/ListDocuments", + request_serializer=document_service.ListDocumentsRequest.serialize, + response_deserializer=document_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + + @property + def create_document( + self, + ) -> Callable[ + [document_service.CreateDocumentRequest], Awaitable[gcd_document.Document] + ]: + r"""Return a callable for the create document method over gRPC. + + Creates a [Document][google.cloud.discoveryengine.v1.Document]. + + Returns: + Callable[[~.CreateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_document" not in self._stubs: + self._stubs["create_document"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/CreateDocument", + request_serializer=document_service.CreateDocumentRequest.serialize, + response_deserializer=gcd_document.Document.deserialize, + ) + return self._stubs["create_document"] + + @property + def update_document( + self, + ) -> Callable[ + [document_service.UpdateDocumentRequest], Awaitable[document.Document] + ]: + r"""Return a callable for the update document method over gRPC. + + Updates a [Document][google.cloud.discoveryengine.v1.Document]. + + Returns: + Callable[[~.UpdateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_document" not in self._stubs: + self._stubs["update_document"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/UpdateDocument", + request_serializer=document_service.UpdateDocumentRequest.serialize, + response_deserializer=document.Document.deserialize, + ) + return self._stubs["update_document"] + + @property + def delete_document( + self, + ) -> Callable[[document_service.DeleteDocumentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete document method over gRPC. + + Deletes a [Document][google.cloud.discoveryengine.v1.Document]. + + Returns: + Callable[[~.DeleteDocumentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
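+        # Because the stub is created on the aio.Channel, the cached callable
+        # returns awaitables rather than blocking on the RPC.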
+ if "delete_document" not in self._stubs: + self._stubs["delete_document"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/DeleteDocument", + request_serializer=document_service.DeleteDocumentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_document"] + + @property + def import_documents( + self, + ) -> Callable[ + [import_config.ImportDocumentsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the import documents method over gRPC. + + Bulk import of multiple + [Document][google.cloud.discoveryengine.v1.Document]s. Request + processing may be synchronous. Non-existing items will be + created. + + Note: It is possible for a subset of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + successfully updated. + + Returns: + Callable[[~.ImportDocumentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_documents" not in self._stubs: + self._stubs["import_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/ImportDocuments", + request_serializer=import_config.ImportDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_documents"] + + @property + def purge_documents( + self, + ) -> Callable[ + [purge_config.PurgeDocumentsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the purge documents method over gRPC. + + Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1.Document]s in a + branch. + + This process is asynchronous. Depending on the number of + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, this operation can take hours to complete. Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1.PurgeDocumentsRequest.force] + to false. + + Returns: + Callable[[~.PurgeDocumentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
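+        # Like import_documents, the purge RPC returns a long-running
+        # operation message that the async client wraps for polling.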
+ if "purge_documents" not in self._stubs: + self._stubs["purge_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/PurgeDocuments", + request_serializer=purge_config.PurgeDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_documents"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("DocumentServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py new file mode 100644 index 000000000000..dfb81c5a863e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py @@ -0,0 +1,1520 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + document_service, + import_config, + purge_config, +) +from google.cloud.discoveryengine_v1.types import document +from google.cloud.discoveryengine_v1.types import document as gcd_document + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DocumentServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DocumentServiceRestInterceptor: + """Interceptor for DocumentService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DocumentServiceRestTransport. + + .. 
code-block:: python
+
+        class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor):
+            def pre_create_document(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_document(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_document(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_document(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_document(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_import_documents(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_import_documents(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_documents(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_documents(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_purge_documents(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_purge_documents(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_document(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_document(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = DocumentServiceRestTransport(interceptor=MyCustomDocumentServiceInterceptor())
+        client = DocumentServiceClient(transport=transport)
+
+
+    """
+
+    def pre_create_document(
+        self,
+        request: document_service.CreateDocumentRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[document_service.CreateDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_create_document(
+        self, response: gcd_document.Document
+    ) -> gcd_document.Document:
+        """Post-rpc interceptor for create_document
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_document(
+        self,
+        request: document_service.DeleteDocumentRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[document_service.DeleteDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def pre_get_document(
+        self,
+        request: document_service.GetDocumentRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[document_service.GetDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
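+
+        For example, a subclass could attach an extra metadata entry before
+        the request goes out (a sketch; the header name is illustrative, not
+        an API-defined one):
+
+        .. code-block:: python
+
+            def pre_get_document(self, request, metadata):
+                # "x-my-audit-header" is a hypothetical example header.
+                metadata = list(metadata) + [("x-my-audit-header", "1")]
+                return request, metadata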
+ """ + return request, metadata + + def post_get_document(self, response: document.Document) -> document.Document: + """Post-rpc interceptor for get_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_import_documents( + self, + request: import_config.ImportDocumentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[import_config.ImportDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_import_documents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_list_documents( + self, + request: document_service.ListDocumentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_service.ListDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_list_documents( + self, response: document_service.ListDocumentsResponse + ) -> document_service.ListDocumentsResponse: + """Post-rpc interceptor for list_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_purge_documents( + self, + request: purge_config.PurgeDocumentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[purge_config.PurgeDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_purge_documents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_update_document( + self, + request: document_service.UpdateDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_service.UpdateDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_update_document(self, response: document.Document) -> document.Document: + """Post-rpc interceptor for update_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. 
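+
+        For example, a subclass could record which document was touched
+        (a sketch; assumes the standard ``logging`` module is imported):
+
+        .. code-block:: python
+
+            def post_update_document(self, response):
+                logging.info("update_document returned: %s", response.name)
+                return response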
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DocumentServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DocumentServiceRestInterceptor + + +class DocumentServiceRestTransport(DocumentServiceTransport): + """REST backend transport for DocumentService. + + Service for ingesting + [Document][google.cloud.discoveryengine.v1.Document] information of + the customer's website. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DocumentServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or DocumentServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def operations_client(self) -> operations_v1.AbstractOperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Only create a new client if we do not already have one.
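+ # [Editor's sketch, not part of the generated patch] A minimal
+ # illustration of how this lazily-cached property behaves, assuming
+ # default construction:
+ #
+ #     transport = DocumentServiceRestTransport()
+ #     ops = transport.operations_client       # first access builds the client
+ #     assert ops is transport.operations_client  # later accesses reuse it
+ #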
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("CreateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "documentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.CreateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_document.Document: + r"""Call the create document method over HTTP. 
+ + Args: + request (~.document_service.CreateDocumentRequest): + The request object. Request message for + [DocumentService.CreateDocument][google.cloud.discoveryengine.v1.DocumentService.CreateDocument] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_document.Document: + Document captures all raw metadata + information of items to be recommended + or searched. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents", + "body": "document", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents", + "body": "document", + }, + ] + request, metadata = self._interceptor.pre_create_document(request, metadata) + pb_request = document_service.CreateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_document.Document() + pb_resp = gcd_document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_document(resp) + return resp + + class _DeleteDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("DeleteDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.DeleteDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete document method over HTTP. + + Args: + request (~.document_service.DeleteDocumentRequest): + The request object. Request message for + [DocumentService.DeleteDocument][google.cloud.discoveryengine.v1.DocumentService.DeleteDocument] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_document(request, metadata) + pb_request = document_service.DeleteDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("GetDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.GetDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Call the get document method over HTTP. + + Args: + request (~.document_service.GetDocumentRequest): + The request object. Request message for + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document.Document: + Document captures all raw metadata + information of items to be recommended + or searched. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}", + }, + ] + request, metadata = self._interceptor.pre_get_document(request, metadata) + pb_request = document_service.GetDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document.Document() + pb_resp = document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_document(resp) + return resp + + class _ImportDocuments(DocumentServiceRestStub): + def __hash__(self): + return hash("ImportDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: import_config.ImportDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import documents method over HTTP. + + Args: + request (~.import_config.ImportDocumentsRequest): + The request object. Request message for Import methods. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents:import", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_documents( + request, metadata + ) + pb_request = import_config.ImportDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_documents(resp) + return resp + + class _ListDocuments(DocumentServiceRestStub): + def __hash__(self): + return hash("ListDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.ListDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.ListDocumentsResponse: + r"""Call the list documents method over HTTP. + + Args: + request (~.document_service.ListDocumentsRequest): + The request object. Request message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.ListDocumentsResponse: + Response message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents", + }, + ] + request, metadata = self._interceptor.pre_list_documents(request, metadata) + pb_request = document_service.ListDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.ListDocumentsResponse() + pb_resp = document_service.ListDocumentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_documents(resp) + return resp + + class _PurgeDocuments(DocumentServiceRestStub): + def __hash__(self): + return hash("PurgeDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: purge_config.PurgeDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge documents method over HTTP. + + Args: + request (~.purge_config.PurgeDocumentsRequest): + The request object. Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents:purge", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_documents(request, metadata) + pb_request = purge_config.PurgeDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_documents(resp) + return resp + + class _UpdateDocument(DocumentServiceRestStub): + def __hash__(self): + return hash("UpdateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.UpdateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document.Document: + r"""Call the update document method over HTTP. + + Args: + request (~.document_service.UpdateDocumentRequest): + The request object. Request message for + [DocumentService.UpdateDocument][google.cloud.discoveryengine.v1.DocumentService.UpdateDocument] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document.Document: + Document captures all raw metadata + information of items to be recommended + or searched. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}", + "body": "document", + }, + { + "method": "patch", + "uri": "/v1/{document.name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}", + "body": "document", + }, + ] + request, metadata = self._interceptor.pre_update_document(request, metadata) + pb_request = document_service.UpdateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document.Document() + pb_resp = document.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_document(resp) + return resp + + @property + def create_document( + self, + ) -> Callable[[document_service.CreateDocumentRequest], gcd_document.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_document( + self, + ) -> Callable[[document_service.DeleteDocumentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_document( + self, + ) -> Callable[[document_service.GetDocumentRequest], document.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_documents( + self, + ) -> Callable[[import_config.ImportDocumentsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ImportDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_documents( + self, + ) -> Callable[ + [document_service.ListDocumentsRequest], document_service.ListDocumentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def purge_documents( + self, + ) -> Callable[[purge_config.PurgeDocumentsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PurgeDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_document( + self, + ) -> Callable[[document_service.UpdateDocumentRequest], document.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DocumentServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DocumentServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DocumentServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/__init__.py new file mode 100644 index 000000000000..d61eb9a130a6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SchemaServiceAsyncClient +from .client import SchemaServiceClient + +__all__ = ( + "SchemaServiceClient", + "SchemaServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py new file mode 100644 index 000000000000..63bb402cc6e3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/async_client.py @@ -0,0 +1,944 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.schema_service import pagers +from google.cloud.discoveryengine_v1.types import schema +from google.cloud.discoveryengine_v1.types import schema as gcd_schema +from google.cloud.discoveryengine_v1.types import schema_service + +from .client import SchemaServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SchemaServiceTransport +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport + + +class SchemaServiceAsyncClient: + """Service for managing + [Schema][google.cloud.discoveryengine.v1.Schema]s. 
+ """
+
+ _client: SchemaServiceClient
+
+ DEFAULT_ENDPOINT = SchemaServiceClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT
+
+ data_store_path = staticmethod(SchemaServiceClient.data_store_path)
+ parse_data_store_path = staticmethod(SchemaServiceClient.parse_data_store_path)
+ schema_path = staticmethod(SchemaServiceClient.schema_path)
+ parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path)
+ common_billing_account_path = staticmethod(
+ SchemaServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ SchemaServiceClient.parse_common_billing_account_path
+ )
+ common_folder_path = staticmethod(SchemaServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ SchemaServiceClient.parse_common_folder_path
+ )
+ common_organization_path = staticmethod(
+ SchemaServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ SchemaServiceClient.parse_common_organization_path
+ )
+ common_project_path = staticmethod(SchemaServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ SchemaServiceClient.parse_common_project_path
+ )
+ common_location_path = staticmethod(SchemaServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ SchemaServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ SchemaServiceAsyncClient: The constructed client.
+ """
+ return SchemaServiceClient.from_service_account_info.__func__(SchemaServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ SchemaServiceAsyncClient: The constructed client.
+ """
+ return SchemaServiceClient.from_service_account_file.__func__(SchemaServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SchemaServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SchemaServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SchemaServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the schema service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SchemaServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SchemaServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_schema( + self, + request: Optional[Union[schema_service.GetSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a [Schema][google.cloud.discoveryengine.v1.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_get_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetSchemaRequest, dict]]): + The request object. Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1.SchemaService.GetSchema] + method. + name (:class:`str`): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Schema: + Defines the structure and layout of a + type of document data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema_service.GetSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_schemas( + self, + request: Optional[Union[schema_service.ListSchemasRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasAsyncPager: + r"""Gets a list of + [Schema][google.cloud.discoveryengine.v1.Schema]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
# - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import discoveryengine_v1
+
+ async def sample_list_schemas():
+ # Create a client
+ client = discoveryengine_v1.SchemaServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = discoveryengine_v1.ListSchemasRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = await client.list_schemas(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.discoveryengine_v1.types.ListSchemasRequest, dict]]):
+ The request object. Request message for
+ [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas]
+ method.
+ parent (:class:`str`):
+ Required. The parent data store resource name, in the
+ format of
+ ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasAsyncPager:
+ Response message for
+ [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas]
+ method.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = schema_service.ListSchemasRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_schemas,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListSchemasAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
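+ # [Editor's sketch, not part of the generated patch] One way to consume
+ # the pager built above; the parent value is a placeholder:
+ #
+ #     pager = await client.list_schemas(parent="parent_value")
+ #     async for schema_item in pager:   # further pages are fetched on demand
+ #         print(schema_item.name)
+ #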
+ return response
+
+ async def create_schema(
+ self,
+ request: Optional[Union[schema_service.CreateSchemaRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ schema: Optional[gcd_schema.Schema] = None,
+ schema_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates a [Schema][google.cloud.discoveryengine.v1.Schema].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import discoveryengine_v1
+
+ async def sample_create_schema():
+ # Create a client
+ client = discoveryengine_v1.SchemaServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = discoveryengine_v1.CreateSchemaRequest(
+ parent="parent_value",
+ schema_id="schema_id_value",
+ )
+
+ # Make the request
+ operation = client.create_schema(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.discoveryengine_v1.types.CreateSchemaRequest, dict]]):
+ The request object. Request message for
+ [SchemaService.CreateSchema][google.cloud.discoveryengine.v1.SchemaService.CreateSchema]
+ method.
+ parent (:class:`str`):
+ Required. The parent data store resource name, in the
+ format of
+ ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ schema (:class:`google.cloud.discoveryengine_v1.types.Schema`):
+ Required. The
+ [Schema][google.cloud.discoveryengine.v1.Schema] to
+ create.
+
+ This corresponds to the ``schema`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ schema_id (:class:`str`):
+ Required. The ID to use for the
+ [Schema][google.cloud.discoveryengine.v1.Schema], which
+ will become the final component of the
+ [Schema.name][google.cloud.discoveryengine.v1.Schema.name].
+
+ This field should conform to
+ `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__
+ standard with a length limit of 63 characters.
+
+ This corresponds to the ``schema_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.discoveryengine_v1.types.Schema`
+ Defines the structure and layout of a type of document
+ data.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
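+ # [Editor's sketch, not part of the generated patch] The check below makes
+ # `request` and the flattened fields mutually exclusive (placeholder values):
+ #
+ #     await client.create_schema(parent="parent_value", schema_id="my_schema")  # OK
+ #     await client.create_schema(request=req, schema_id="my_schema")            # ValueError
+ #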
+        has_flattened_params = any([parent, schema, schema_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = schema_service.CreateSchemaRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if schema is not None:
+            request.schema = schema
+        if schema_id is not None:
+            request.schema_id = schema_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_schema,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            gcd_schema.Schema,
+            metadata_type=schema_service.CreateSchemaMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_schema(
+        self,
+        request: Optional[Union[schema_service.UpdateSchemaRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Updates a [Schema][google.cloud.discoveryengine.v1.Schema].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1
+
+            async def sample_update_schema():
+                # Create a client
+                client = discoveryengine_v1.SchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = discoveryengine_v1.UpdateSchemaRequest(
+                )
+
+                # Make the request
+                operation = client.update_schema(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.discoveryengine_v1.types.UpdateSchemaRequest, dict]]):
+                The request object. Request message for
+                [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1.SchemaService.UpdateSchema]
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.discoveryengine_v1.types.Schema`
+                Defines the structure and layout of a type of document
+                data.
+
+        """
+        # Create or coerce a protobuf request object.
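+        # ``UpdateSchemaRequest`` is a proto-plus message, so the constructor
+        # below accepts ``None``, a ``dict``, or another request instance,
+        # e.g. ``schema_service.UpdateSchemaRequest(
+        #     {"schema": {"name": "projects/.../schemas/my_schema"}})``.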
+        request = schema_service.UpdateSchemaRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_schema,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("schema.name", request.schema.name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            schema.Schema,
+            metadata_type=schema_service.UpdateSchemaMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_schema(
+        self,
+        request: Optional[Union[schema_service.DeleteSchemaRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Deletes a [Schema][google.cloud.discoveryengine.v1.Schema].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1
+
+            async def sample_delete_schema():
+                # Create a client
+                client = discoveryengine_v1.SchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = discoveryengine_v1.DeleteSchemaRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.delete_schema(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.discoveryengine_v1.types.DeleteSchemaRequest, dict]]):
+                The request object. Request message for
+                [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1.SchemaService.DeleteSchema]
+                method.
+            name (:class:`str`):
+                Required. The full resource name of the schema, in the
+                format of
+                ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                   empty messages in your APIs. A typical example is to
+                   use it as the request or the response type of an API
+                   method.
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema_service.DeleteSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=schema_service.DeleteSchemaMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
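+        # Unlike ``list_schemas`` above, this operations mixin returns the
+        # raw ``ListOperationsResponse`` rather than a pager; callers fetch
+        # further pages themselves by re-issuing the request with
+        # ``page_token`` set to the previous response's ``next_page_token``.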
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SchemaServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py new file mode 100644 index 000000000000..7bf26004f64e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py @@ -0,0 +1,1201 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
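+#
+# This module implements the synchronous ``SchemaServiceClient``; it exposes
+# the same surface as the ``SchemaServiceAsyncClient`` above. A minimal
+# usage sketch (resource names below are placeholders):
+#
+#     from google.cloud import discoveryengine_v1
+#
+#     client = discoveryengine_v1.SchemaServiceClient()
+#     for schema in client.list_schemas(parent="projects/.../dataStores/..."):
+#         print(schema.name)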
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.schema_service import pagers +from google.cloud.discoveryengine_v1.types import schema +from google.cloud.discoveryengine_v1.types import schema as gcd_schema +from google.cloud.discoveryengine_v1.types import schema_service + +from .transports.base import DEFAULT_CLIENT_INFO, SchemaServiceTransport +from .transports.grpc import SchemaServiceGrpcTransport +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .transports.rest import SchemaServiceRestTransport + + +class SchemaServiceClientMeta(type): + """Metaclass for the SchemaService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] + _transport_registry["grpc"] = SchemaServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SchemaServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SchemaServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SchemaServiceClient(metaclass=SchemaServiceClientMeta): + """Service for managing + [Schema][google.cloud.discoveryengine.v1.Schema]s. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SchemaServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SchemaServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def schema_path( + project: str, + location: str, + data_store: str, + schema: str, + ) -> str: + """Returns a fully-qualified schema string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/schemas/{schema}".format( + project=project, + location=location, + data_store=data_store, + schema=schema, + ) + + @staticmethod + def parse_schema_path(path: str) -> Dict[str, str]: + """Parses a schema path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/schemas/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: 
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, SchemaServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the schema service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, SchemaServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SchemaServiceTransport): + # transport is a SchemaServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_schema( + self, + request: Optional[Union[schema_service.GetSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a [Schema][google.cloud.discoveryengine.v1.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.GetSchemaRequest, dict]): + The request object. Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1.SchemaService.GetSchema] + method. + name (str): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Schema: + Defines the structure and layout of a + type of document data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.GetSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
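+        # For example, a dict such as ``{"name": "projects/.../schemas/..."}``
+        # is converted into a new ``GetSchemaRequest`` below, while an
+        # existing ``GetSchemaRequest`` message is used as-is.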
+ if not isinstance(request, schema_service.GetSchemaRequest): + request = schema_service.GetSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_schemas( + self, + request: Optional[Union[schema_service.ListSchemasRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasPager: + r"""Gets a list of + [Schema][google.cloud.discoveryengine.v1.Schema]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_schemas(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListSchemasRequest, dict]): + The request object. Request message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + method. + parent (str): + Required. The parent data store resource name, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasPager: + Response message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.ListSchemasRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema_service.ListSchemasRequest): + request = schema_service.ListSchemasRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_schemas] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchemasPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_schema( + self, + request: Optional[Union[schema_service.CreateSchemaRequest, dict]] = None, + *, + parent: Optional[str] = None, + schema: Optional[gcd_schema.Schema] = None, + schema_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Schema][google.cloud.discoveryengine.v1.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_create_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Make the request + operation = client.create_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CreateSchemaRequest, dict]): + The request object. Request message for + [SchemaService.CreateSchema][google.cloud.discoveryengine.v1.SchemaService.CreateSchema] + method. + parent (str): + Required. The parent data store resource name, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.cloud.discoveryengine_v1.types.Schema): + Required. The + [Schema][google.cloud.discoveryengine.v1.Schema] to + create. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (str): + Required. 
+                The ID to use for the
+                [Schema][google.cloud.discoveryengine.v1.Schema], which
+                will become the final component of the
+                [Schema.name][google.cloud.discoveryengine.v1.Schema.name].
+
+                This field should conform to
+                `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__
+                standard with a length limit of 63 characters.
+
+                This corresponds to the ``schema_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.discoveryengine_v1.types.Schema`
+                Defines the structure and layout of a type of document
+                data.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, schema, schema_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a schema_service.CreateSchemaRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, schema_service.CreateSchemaRequest):
+            request = schema_service.CreateSchemaRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if schema is not None:
+            request.schema = schema
+        if schema_id is not None:
+            request.schema_id = schema_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            gcd_schema.Schema,
+            metadata_type=schema_service.CreateSchemaMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_schema(
+        self,
+        request: Optional[Union[schema_service.UpdateSchemaRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Updates a [Schema][google.cloud.discoveryengine.v1.Schema].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_update_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateSchemaRequest( + ) + + # Make the request + operation = client.update_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.UpdateSchemaRequest, dict]): + The request object. Request message for + [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1.SchemaService.UpdateSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.Schema` + Defines the structure and layout of a type of document + data. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.UpdateSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema_service.UpdateSchemaRequest): + request = schema_service.UpdateSchemaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("schema.name", request.schema.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + schema.Schema, + metadata_type=schema_service.UpdateSchemaMetadata, + ) + + # Done; return the response. + return response + + def delete_schema( + self, + request: Optional[Union[schema_service.DeleteSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a [Schema][google.cloud.discoveryengine.v1.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_delete_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.DeleteSchemaRequest, dict]): + The request object. Request message for + [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1.SchemaService.DeleteSchema] + method. + name (str): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.DeleteSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema_service.DeleteSchemaRequest): + request = schema_service.DeleteSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=schema_service.DeleteSchemaMetadata, + ) + + # Done; return the response. 
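+        # The returned future resolves to ``empty_pb2.Empty``, so calling
+        # ``result()`` only confirms that the deletion finished, e.g.:
+        #
+        #     client.delete_schema(name=name).result()  # blocks until done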
+ return response + + def __enter__(self) -> "SchemaServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
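+        # Concretely, the operation ``name`` is mirrored into the
+        # ``x-goog-request-params`` gRPC metadata header (for example
+        # ``name=projects/.../operations/...``) so the backend can route
+        # the request.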
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SchemaServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/pagers.py new file mode 100644 index 000000000000..42cc77d16d84 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1.types import schema, schema_service + + +class ListSchemasPager: + """A pager for iterating through ``list_schemas`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListSchemasResponse` object, and + provides an ``__iter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSchemas`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListSchemasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., schema_service.ListSchemasResponse], + request: schema_service.ListSchemasRequest, + response: schema_service.ListSchemasResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListSchemasRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListSchemasResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = schema_service.ListSchemasRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[schema_service.ListSchemasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[schema.Schema]: + for page in self.pages: + yield from page.schemas + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSchemasAsyncPager: + """A pager for iterating through ``list_schemas`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListSchemasResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSchemas`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListSchemasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[schema_service.ListSchemasResponse]], + request: schema_service.ListSchemasRequest, + response: schema_service.ListSchemasResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListSchemasRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListSchemasResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = schema_service.ListSchemasRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[schema_service.ListSchemasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[schema.Schema]: + async def async_generator(): + async for page in self.pages: + for response in page.schemas: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/__init__.py new file mode 100644 index 000000000000..4948bcebf0a6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SchemaServiceTransport +from .grpc import SchemaServiceGrpcTransport +from .grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .rest import SchemaServiceRestInterceptor, SchemaServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] +_transport_registry["grpc"] = SchemaServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SchemaServiceRestTransport + +__all__ = ( + "SchemaServiceTransport", + "SchemaServiceGrpcTransport", + "SchemaServiceGrpcAsyncIOTransport", + "SchemaServiceRestTransport", + "SchemaServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/base.py new file mode 100644 index 000000000000..90f4c0c6b627 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/base.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import schema, schema_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SchemaServiceTransport(abc.ABC): + """Abstract transport class for SchemaService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
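+        # Resolution order below: passing both ``credentials`` and
+        # ``credentials_file`` is an error; an explicit ``credentials_file``
+        # is loaded next; otherwise Application Default Credentials are
+        # resolved via ``google.auth.default()``.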
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_schema: gapic_v1.method.wrap_method( + self.get_schema, + default_timeout=None, + client_info=client_info, + ), + self.list_schemas: gapic_v1.method.wrap_method( + self.list_schemas, + default_timeout=None, + client_info=client_info, + ), + self.create_schema: gapic_v1.method.wrap_method( + self.create_schema, + default_timeout=None, + client_info=client_info, + ), + self.update_schema: gapic_v1.method.wrap_method( + self.update_schema, + default_timeout=None, + client_info=client_info, + ), + self.delete_schema: gapic_v1.method.wrap_method( + self.delete_schema, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
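+
+        A typical teardown sketch (assumes a concrete transport owned by a
+        single client rather than this abstract base)::
+
+            client = SchemaServiceClient()
+            try:
+                ...  # issue RPCs
+            finally:
+                client.transport.close()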
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_schema( + self, + ) -> Callable[ + [schema_service.GetSchemaRequest], + Union[schema.Schema, Awaitable[schema.Schema]], + ]: + raise NotImplementedError() + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], + Union[ + schema_service.ListSchemasResponse, + Awaitable[schema_service.ListSchemasResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_schema( + self, + ) -> Callable[ + [schema_service.CreateSchemaRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_schema( + self, + ) -> Callable[ + [schema_service.UpdateSchemaRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_schema( + self, + ) -> Callable[ + [schema_service.DeleteSchemaRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SchemaServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc.py new file mode 100644 index 000000000000..d0ec302346db --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc.py @@ -0,0 +1,423 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import schema, schema_service + +from .base import DEFAULT_CLIENT_INFO, SchemaServiceTransport + + +class SchemaServiceGrpcTransport(SchemaServiceTransport): + """gRPC backend transport for SchemaService. 
+ + Service for managing + [Schema][google.cloud.discoveryengine.v1.Schema]s. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. 
+ Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def get_schema(self) -> Callable[[schema_service.GetSchemaRequest], schema.Schema]: + r"""Return a callable for the get schema method over gRPC. + + Gets a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.GetSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/GetSchema", + request_serializer=schema_service.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], schema_service.ListSchemasResponse + ]: + r"""Return a callable for the list schemas method over gRPC. + + Gets a list of + [Schema][google.cloud.discoveryengine.v1.Schema]s. + + Returns: + Callable[[~.ListSchemasRequest], + ~.ListSchemasResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/ListSchemas", + request_serializer=schema_service.ListSchemasRequest.serialize, + response_deserializer=schema_service.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def create_schema( + self, + ) -> Callable[[schema_service.CreateSchemaRequest], operations_pb2.Operation]: + r"""Return a callable for the create schema method over gRPC. + + Creates a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.CreateSchemaRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
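+        # The callable is memoized in ``self._stubs`` so that repeated
+        # property access reuses one registered stub per RPC instead of
+        # re-creating it on the channel.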
+ if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/CreateSchema", + request_serializer=schema_service.CreateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_schema"] + + @property + def update_schema( + self, + ) -> Callable[[schema_service.UpdateSchemaRequest], operations_pb2.Operation]: + r"""Return a callable for the update schema method over gRPC. + + Updates a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.UpdateSchemaRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_schema" not in self._stubs: + self._stubs["update_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/UpdateSchema", + request_serializer=schema_service.UpdateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_schema"] + + @property + def delete_schema( + self, + ) -> Callable[[schema_service.DeleteSchemaRequest], operations_pb2.Operation]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.DeleteSchemaRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/DeleteSchema", + request_serializer=schema_service.DeleteSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_schema"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SchemaServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a99a5483e7b2 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/grpc_asyncio.py @@ -0,0 +1,433 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import schema, schema_service + +from .base import DEFAULT_CLIENT_INFO, SchemaServiceTransport +from .grpc import SchemaServiceGrpcTransport + + +class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): + """gRPC AsyncIO backend transport for SchemaService. + + Service for managing + [Schema][google.cloud.discoveryengine.v1.Schema]s. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
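+        # The async operations client shares this transport's channel; the
+        # futures returned by create/update/delete_schema use it to poll
+        # their long-running operations.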
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_schema( + self, + ) -> Callable[[schema_service.GetSchemaRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the get schema method over gRPC. + + Gets a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.GetSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/GetSchema", + request_serializer=schema_service.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], + Awaitable[schema_service.ListSchemasResponse], + ]: + r"""Return a callable for the list schemas method over gRPC. + + Gets a list of + [Schema][google.cloud.discoveryengine.v1.Schema]s. + + Returns: + Callable[[~.ListSchemasRequest], + Awaitable[~.ListSchemasResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/ListSchemas", + request_serializer=schema_service.ListSchemasRequest.serialize, + response_deserializer=schema_service.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def create_schema( + self, + ) -> Callable[ + [schema_service.CreateSchemaRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create schema method over gRPC. + + Creates a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.CreateSchemaRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/CreateSchema", + request_serializer=schema_service.CreateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_schema"] + + @property + def update_schema( + self, + ) -> Callable[ + [schema_service.UpdateSchemaRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update schema method over gRPC. + + Updates a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.UpdateSchemaRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_schema" not in self._stubs: + self._stubs["update_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/UpdateSchema", + request_serializer=schema_service.UpdateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_schema"] + + @property + def delete_schema( + self, + ) -> Callable[ + [schema_service.DeleteSchemaRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a [Schema][google.cloud.discoveryengine.v1.Schema]. + + Returns: + Callable[[~.DeleteSchemaRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SchemaService/DeleteSchema", + request_serializer=schema_service.DeleteSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_schema"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SchemaServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py new file mode 100644 index 000000000000..1ec11444e533 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py @@ -0,0 +1,1251 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import schema, schema_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SchemaServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SchemaServiceRestInterceptor: + """Interceptor for SchemaService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SchemaServiceRestTransport. + + .. 
code-block:: python + class MyCustomSchemaServiceInterceptor(SchemaServiceRestInterceptor): + def pre_create_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_schemas(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_schemas(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_schema(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SchemaServiceRestTransport(interceptor=MyCustomSchemaServiceInterceptor()) + client = SchemaServiceClient(transport=transport) + + + """ + + def pre_create_schema( + self, + request: schema_service.CreateSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.CreateSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_create_schema( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_delete_schema( + self, + request: schema_service.DeleteSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.DeleteSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_delete_schema( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_get_schema( + self, + request: schema_service.GetSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.GetSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_get_schema(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for get_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_schemas( + self, + request: schema_service.ListSchemasRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.ListSchemasRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_schemas + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_schemas( + self, response: schema_service.ListSchemasResponse + ) -> schema_service.ListSchemasResponse: + """Post-rpc interceptor for list_schemas + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_update_schema( + self, + request: schema_service.UpdateSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.UpdateSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_update_schema( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SchemaServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SchemaServiceRestInterceptor + + +class SchemaServiceRestTransport(SchemaServiceTransport): + """REST backend transport for SchemaService. + + Service for managing + [Schema][google.cloud.discoveryengine.v1.Schema]s. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[SchemaServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SchemaServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
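+        # REST has no channel to share, so long-running-operation polling is
+        # wired through a dedicated operations_v1 REST transport configured
+        # with the explicit GetOperation/ListOperations HTTP mappings below.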
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("CreateSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "schemaId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.CreateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create schema method over HTTP. 
+ + Args: + request (~.schema_service.CreateSchemaRequest): + The request object. Request message for + [SchemaService.CreateSchema][google.cloud.discoveryengine.v1.SchemaService.CreateSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/schemas", + "body": "schema", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas", + "body": "schema", + }, + ] + request, metadata = self._interceptor.pre_create_schema(request, metadata) + pb_request = schema_service.CreateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_schema(resp) + return resp + + class _DeleteSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("DeleteSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.DeleteSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete schema method over HTTP. + + Args: + request (~.schema_service.DeleteSchemaRequest): + The request object. Request message for + [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1.SchemaService.DeleteSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/schemas/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_schema(request, metadata) + pb_request = schema_service.DeleteSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_schema(resp) + return resp + + class _GetSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("GetSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.GetSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Call the get schema method over HTTP. + + Args: + request (~.schema_service.GetSchemaRequest): + The request object. Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1.SchemaService.GetSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.Schema: + Defines the structure and layout of a + type of document data. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/schemas/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}", + }, + ] + request, metadata = self._interceptor.pre_get_schema(request, metadata) + pb_request = schema_service.GetSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_schema(resp) + return resp + + class _ListSchemas(SchemaServiceRestStub): + def __hash__(self): + return hash("ListSchemas") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.ListSchemasRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema_service.ListSchemasResponse: + r"""Call the list schemas method over HTTP. + + Args: + request (~.schema_service.ListSchemasRequest): + The request object. Request message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema_service.ListSchemasResponse: + Response message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/schemas", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas", + }, + ] + request, metadata = self._interceptor.pre_list_schemas(request, metadata) + pb_request = schema_service.ListSchemasRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema_service.ListSchemasResponse() + pb_resp = schema_service.ListSchemasResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_schemas(resp) + return resp + + class _UpdateSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("UpdateSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.UpdateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update schema method over HTTP. + + Args: + request (~.schema_service.UpdateSchemaRequest): + The request object. Request message for + [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1.SchemaService.UpdateSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{schema.name=projects/*/locations/*/dataStores/*/schemas/*}", + "body": "schema", + }, + { + "method": "patch", + "uri": "/v1/{schema.name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}", + "body": "schema", + }, + ] + request, metadata = self._interceptor.pre_update_schema(request, metadata) + pb_request = schema_service.UpdateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_schema(resp) + return resp + + @property + def create_schema( + self, + ) -> Callable[[schema_service.CreateSchemaRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_schema( + self, + ) -> Callable[[schema_service.DeleteSchemaRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_schema(self) -> Callable[[schema_service.GetSchemaRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], schema_service.ListSchemasResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSchemas(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_schema( + self, + ) -> Callable[[schema_service.UpdateSchemaRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
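+            # For example, a 404 response from the service surfaces as
+            # core_exceptions.NotFound rather than a raw requests error.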
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SchemaServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/__init__.py new file mode 100644 index 000000000000..be448b1d6b5a --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import SearchServiceAsyncClient +from .client import SearchServiceClient + +__all__ = ( + "SearchServiceClient", + "SearchServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py new file mode 100644 index 000000000000..95c695542945 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/async_client.py @@ -0,0 +1,442 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1.services.search_service import pagers +from google.cloud.discoveryengine_v1.types import search_service + +from .client import SearchServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SearchServiceTransport +from .transports.grpc_asyncio import SearchServiceGrpcAsyncIOTransport + + +class SearchServiceAsyncClient: + """Service for search.""" + + _client: SearchServiceClient + + DEFAULT_ENDPOINT = SearchServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SearchServiceClient.DEFAULT_MTLS_ENDPOINT + + branch_path = staticmethod(SearchServiceClient.branch_path) + parse_branch_path = staticmethod(SearchServiceClient.parse_branch_path) + document_path = staticmethod(SearchServiceClient.document_path) + parse_document_path = staticmethod(SearchServiceClient.parse_document_path) + serving_config_path = staticmethod(SearchServiceClient.serving_config_path) + parse_serving_config_path = staticmethod( + SearchServiceClient.parse_serving_config_path + ) + common_billing_account_path = staticmethod( + SearchServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SearchServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SearchServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SearchServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SearchServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SearchServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SearchServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SearchServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SearchServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SearchServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchServiceAsyncClient: The constructed client. + """ + return SearchServiceClient.from_service_account_info.__func__(SearchServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SearchServiceAsyncClient: The constructed client.
+        """
+        return SearchServiceClient.from_service_account_file.__func__(SearchServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return SearchServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> SearchServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SearchServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(SearchServiceClient).get_transport_class, type(SearchServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, SearchServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the search service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.SearchServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = SearchServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def search(
+        self,
+        request: Optional[Union[search_service.SearchRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.SearchAsyncPager:
+        r"""Performs a search.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1
+
+            async def sample_search():
+                # Create a client
+                client = discoveryengine_v1.SearchServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = discoveryengine_v1.SearchRequest(
+                    serving_config="serving_config_value",
+                )
+
+                # Make the request
+                page_result = await client.search(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.discoveryengine_v1.types.SearchRequest, dict]]):
+                The request object. Request message for
+                [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search]
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.discoveryengine_v1.services.search_service.pagers.SearchAsyncPager:
+                Response message for
+                [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search]
+                method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = search_service.SearchRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.search,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
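+        # The routing header is transmitted as "x-goog-request-params", e.g.
+        # serving_config=<url-encoded serving config resource name>, so the
+        # backend can route the request without inspecting the payload.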
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config", request.serving_config),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
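+        # Note that this mixin method is wrapped on the fly rather than taken
+        # from _wrapped_methods, so no default retry policy is attached here;
+        # retries apply only when the caller passes one explicitly.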
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py new file mode 100644 index 000000000000..5b22e268721f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py @@ -0,0 +1,723 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1.services.search_service import pagers +from google.cloud.discoveryengine_v1.types import search_service + +from .transports.base import DEFAULT_CLIENT_INFO, SearchServiceTransport +from .transports.grpc import SearchServiceGrpcTransport +from .transports.grpc_asyncio import SearchServiceGrpcAsyncIOTransport +from .transports.rest import SearchServiceRestTransport + + +class SearchServiceClientMeta(type): + """Metaclass for the SearchService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[SearchServiceTransport]]
+    _transport_registry["grpc"] = SearchServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = SearchServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = SearchServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[SearchServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class SearchServiceClient(metaclass=SearchServiceClientMeta):
+    """Service for search."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "discoveryengine.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SearchServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SearchServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> SearchServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SearchServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def branch_path(
+        project: str,
+        location: str,
+        data_store: str,
+        branch: str,
+    ) -> str:
+        """Returns a fully-qualified branch string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            branch=branch,
+        )
+
+    @staticmethod
+    def parse_branch_path(path: str) -> Dict[str, str]:
+        """Parses a branch path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/branches/(?P<branch>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def document_path(
+        project: str,
+        location: str,
+        data_store: str,
+        branch: str,
+        document: str,
+    ) -> str:
+        """Returns a fully-qualified document string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            branch=branch,
+            document=document,
+        )
+
+    @staticmethod
+    def parse_document_path(path: str) -> Dict[str, str]:
+        """Parses a document path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/branches/(?P<branch>.+?)/documents/(?P<document>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def serving_config_path(
+        project: str,
+        location: str,
+        data_store: str,
+        serving_config: str,
+    ) -> str:
+        """Returns a fully-qualified serving_config string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            serving_config=serving_config,
+        )
+
+    @staticmethod
+    def parse_serving_config_path(path: str) -> Dict[str, str]:
+        """Parses a serving_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/servingConfigs/(?P<serving_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
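+        # Precedence: an explicit client_options.api_endpoint always wins;
+        # otherwise GOOGLE_API_USE_MTLS_ENDPOINT ("always"/"never"/"auto",
+        # combined with whether a client cert source was found) selects
+        # between the mTLS and regular endpoints.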
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SearchServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SearchServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SearchServiceTransport): + # transport is a SearchServiceTransport instance. 
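+            # A pre-built transport instance already carries its own
+            # credentials and scopes, so supplying either alongside it is
+            # rejected below.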
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def search( + self, + request: Optional[Union[search_service.SearchRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchPager: + r"""Performs a search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_search(): + # Create a client + client = discoveryengine_v1.SearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.SearchRequest( + serving_config="serving_config_value", + ) + + # Make the request + page_result = client.search(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.SearchRequest, dict]): + The request object. Request message for + [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.search_service.pagers.SearchPager: + Response message for + [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a search_service.SearchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, search_service.SearchRequest): + request = search_service.SearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config", request.serving_config),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SearchServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/pagers.py new file mode 100644 index 000000000000..0e1bed6be230 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1.types import search_service + + +class SearchPager: + """A pager for iterating through ``search`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.SearchResponse` object, and + provides an ``__iter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``Search`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.SearchResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., search_service.SearchResponse], + request: search_service.SearchRequest, + response: search_service.SearchResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.SearchRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.SearchResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
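+
+        Example (an illustrative sketch; ``client`` and ``request`` are assumed
+        to be an existing ``SearchServiceClient`` and ``SearchRequest``, and the
+        client normally constructs this pager for you):
+
+        .. code-block:: python
+
+            pager = client.search(request=request)
+            for page in pager.pages:
+                # Each page is a complete SearchResponse message.
+                print(page.total_size)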
+ """ + self._method = method + self._request = search_service.SearchRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[search_service.SearchResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[search_service.SearchResponse.SearchResult]: + for page in self.pages: + yield from page.results + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAsyncPager: + """A pager for iterating through ``search`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.SearchResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``Search`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.SearchResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[search_service.SearchResponse]], + request: search_service.SearchRequest, + response: search_service.SearchResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.SearchRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.SearchResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
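+
+        Example (an illustrative sketch; ``client`` and ``request`` are assumed
+        to be an existing ``SearchServiceAsyncClient`` and ``SearchRequest``,
+        and the client normally constructs this pager for you):
+
+        .. code-block:: python
+
+            pager = await client.search(request=request)
+            async for result in pager:
+                # Each result is a SearchResponse.SearchResult message.
+                print(result.id)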
+ """ + self._method = method + self._request = search_service.SearchRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[search_service.SearchResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[search_service.SearchResponse.SearchResult]: + async def async_generator(): + async for page in self.pages: + for response in page.results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/__init__.py new file mode 100644 index 000000000000..5882bd2969d8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SearchServiceTransport +from .grpc import SearchServiceGrpcTransport +from .grpc_asyncio import SearchServiceGrpcAsyncIOTransport +from .rest import SearchServiceRestInterceptor, SearchServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SearchServiceTransport]] +_transport_registry["grpc"] = SearchServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SearchServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SearchServiceRestTransport + +__all__ = ( + "SearchServiceTransport", + "SearchServiceGrpcTransport", + "SearchServiceGrpcAsyncIOTransport", + "SearchServiceRestTransport", + "SearchServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/base.py new file mode 100644 index 000000000000..235c45e16509 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/base.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import search_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SearchServiceTransport(abc.ABC): + """Abstract transport class for SearchService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
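+        # ``credentials`` and ``credentials_file`` are two sources for the same
+        # thing, so passing both is an error rather than a silent preference.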
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if the credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.search: gapic_v1.method.wrap_method(
+                self.search,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    @property
+    def search(
+        self,
+    ) -> Callable[
+        [search_service.SearchRequest],
+        Union[search_service.SearchResponse, Awaitable[search_service.SearchResponse]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest],
+        Union[
+            operations_pb2.ListOperationsResponse,
+            Awaitable[operations_pb2.ListOperationsResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[
+        [operations_pb2.GetOperationRequest],
+        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = ("SearchServiceTransport",)
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc.py
new file mode 100644
index 000000000000..6215bce1803b
--- /dev/null
+++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc.py
@@ -0,0 +1,302 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2
+import grpc  # type: ignore
+
+from google.cloud.discoveryengine_v1.types import search_service
+
+from .base import DEFAULT_CLIENT_INFO, SearchServiceTransport
+
+
+class SearchServiceGrpcTransport(SearchServiceTransport):
+    """gRPC backend transport for SearchService.
+
+    Service for search.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
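+                A minimal sketch of such a callback (the file names here are
+                purely illustrative assumptions)::
+
+                    def my_cert_source():
+                        with open("client_cert.pem", "rb") as cert_file:
+                            cert = cert_file.read()
+                        with open("client_key.pem", "rb") as key_file:
+                            key = key_file.read()
+                        return cert, key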
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def search(
+        self,
+    ) -> Callable[[search_service.SearchRequest], search_service.SearchResponse]:
+        r"""Return a callable for the search method over gRPC.
+
+        Performs a search.
+
+        Returns:
+            Callable[[~.SearchRequest],
+                    ~.SearchResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "search" not in self._stubs:
+            self._stubs["search"] = self.grpc_channel.unary_unary(
+                "/google.cloud.discoveryengine.v1.SearchService/Search",
+                request_serializer=search_service.SearchRequest.serialize,
+                response_deserializer=search_service.SearchResponse.deserialize,
+            )
+        return self._stubs["search"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
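+        # Stubs are cached in self._stubs, so repeated property access reuses
+        # the callable already registered on the channel.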
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SearchServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..9c1840b887c1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/grpc_asyncio.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import search_service + +from .base import DEFAULT_CLIENT_INFO, SearchServiceTransport +from .grpc import SearchServiceGrpcTransport + + +class SearchServiceGrpcAsyncIOTransport(SearchServiceTransport): + """gRPC AsyncIO backend transport for SearchService. + + Service for search. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def search(
+        self,
+    ) -> Callable[
+        [search_service.SearchRequest], Awaitable[search_service.SearchResponse]
+    ]:
+        r"""Return a callable for the search method over gRPC.
+
+        Performs a search.
+
+        Returns:
+            Callable[[~.SearchRequest],
+                    Awaitable[~.SearchResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
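+        # Unlike the sync transport, calling the returned stub produces an
+        # awaitable; callers must ``await`` it to get the SearchResponse.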
+ if "search" not in self._stubs: + self._stubs["search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchService/Search", + request_serializer=search_service.SearchRequest.serialize, + response_deserializer=search_service.SearchResponse.deserialize, + ) + return self._stubs["search"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SearchServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py new file mode 100644 index 000000000000..886e12dc2620 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py @@ -0,0 +1,580 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.discoveryengine_v1.types import search_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SearchServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SearchServiceRestInterceptor: + """Interceptor for SearchService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SearchServiceRestTransport. + + .. code-block:: python + class MyCustomSearchServiceInterceptor(SearchServiceRestInterceptor): + def pre_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SearchServiceRestTransport(interceptor=MyCustomSearchServiceInterceptor()) + client = SearchServiceClient(transport=transport) + + + """ + + def pre_search( + self, request: search_service.SearchRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[search_service.SearchRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. + """ + return request, metadata + + def post_search( + self, response: search_service.SearchResponse + ) -> search_service.SearchResponse: + """Post-rpc interceptor for search + + Override in a subclass to manipulate the response + after it is returned by the SearchService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. 
+        """
+        return request, metadata
+
+    def post_get_operation(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SearchService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_operations(
+        self,
+        request: operations_pb2.ListOperationsRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_operations
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SearchService server.
+        """
+        return request, metadata
+
+    def post_list_operations(
+        self, response: operations_pb2.ListOperationsResponse
+    ) -> operations_pb2.ListOperationsResponse:
+        """Post-rpc interceptor for list_operations
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SearchService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class SearchServiceRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: SearchServiceRestInterceptor
+
+
+class SearchServiceRestTransport(SearchServiceTransport):
+    """REST backend transport for SearchService.
+
+    Service for search.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[SearchServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SearchServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Search(SearchServiceRestStub):
+        def __hash__(self):
+            return hash("Search")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: search_service.SearchRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> search_service.SearchResponse:
+            r"""Call the search method over HTTP.
+
+            Args:
+                request (~.search_service.SearchRequest):
+                    The request object. Request message for
+                    [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search]
+                    method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.search_service.SearchResponse:
+                    Response message for
+                    [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search]
+                    method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{serving_config=projects/*/locations/*/dataStores/*/servingConfigs/*}:search", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{serving_config=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}:search", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_search(request, metadata) + pb_request = search_service.SearchRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = search_service.SearchResponse() + pb_resp = search_service.SearchResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search(resp) + return resp + + @property + def search( + self, + ) -> Callable[[search_service.SearchRequest], search_service.SearchResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Search(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SearchServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SearchServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SearchServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/__init__.py new file mode 100644 index 000000000000..463f46f170a0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
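
With the REST transport registered, callers can opt into it explicitly. A hedged usage sketch (the serving config path is a placeholder, not a value from this diff):

```python
# Force the REST transport for SearchService instead of the default gRPC one.
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.SearchServiceClient(transport="rest")
request = discoveryengine_v1.SearchRequest(
    serving_config=(
        "projects/PROJECT/locations/global/collections/default_collection"
        "/dataStores/DATA_STORE/servingConfigs/default_config"
    ),
    query="example query",
)
# search() returns a pager that issues follow-up HTTP calls as you iterate.
for result in client.search(request=request):
    print(result)
```
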
+# +from .async_client import UserEventServiceAsyncClient +from .client import UserEventServiceClient + +__all__ = ( + "UserEventServiceClient", + "UserEventServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py new file mode 100644 index 000000000000..b9c824f700a7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py @@ -0,0 +1,683 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + common, + import_config, + user_event, + user_event_service, +) + +from .client import UserEventServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, UserEventServiceTransport +from .transports.grpc_asyncio import UserEventServiceGrpcAsyncIOTransport + + +class UserEventServiceAsyncClient: + """Service for ingesting end user actions on a website to + Discovery Engine API. 
+ """ + + _client: UserEventServiceClient + + DEFAULT_ENDPOINT = UserEventServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = UserEventServiceClient.DEFAULT_MTLS_ENDPOINT + + data_store_path = staticmethod(UserEventServiceClient.data_store_path) + parse_data_store_path = staticmethod(UserEventServiceClient.parse_data_store_path) + document_path = staticmethod(UserEventServiceClient.document_path) + parse_document_path = staticmethod(UserEventServiceClient.parse_document_path) + common_billing_account_path = staticmethod( + UserEventServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + UserEventServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(UserEventServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + UserEventServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + UserEventServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + UserEventServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(UserEventServiceClient.common_project_path) + parse_common_project_path = staticmethod( + UserEventServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(UserEventServiceClient.common_location_path) + parse_common_location_path = staticmethod( + UserEventServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UserEventServiceAsyncClient: The constructed client. + """ + return UserEventServiceClient.from_service_account_info.__func__(UserEventServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UserEventServiceAsyncClient: The constructed client. + """ + return UserEventServiceClient.from_service_account_file.__func__(UserEventServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return UserEventServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> UserEventServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            UserEventServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, UserEventServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the user event service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.UserEventServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = UserEventServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
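
The endpoint rules spelled out in the docstrings above reduce to a small decision function. This sketch mirrors the documented behavior rather than the library's internal code path:

```python
# Endpoint selection as documented: an explicit api_endpoint wins, then
# GOOGLE_API_USE_MTLS_ENDPOINT ("always"/"never"/"auto") plus the presence
# of a client certificate decide between the mTLS and regular endpoints.
import os

def resolve_endpoint(api_endpoint_override, client_cert_source):
    if api_endpoint_override is not None:
        return api_endpoint_override
    mode = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if mode == "always" or (mode == "auto" and client_cert_source is not None):
        return "discoveryengine.mtls.googleapis.com"
    return "discoveryengine.googleapis.com"
```
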
+    async def write_user_event(
+        self,
+        request: Optional[Union[user_event_service.WriteUserEventRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> user_event.UserEvent:
+        r"""Writes a single user event.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1
+
+            async def sample_write_user_event():
+                # Create a client
+                client = discoveryengine_v1.UserEventServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = discoveryengine_v1.WriteUserEventRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = await client.write_user_event(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.discoveryengine_v1.types.WriteUserEventRequest, dict]]):
+                The request object. Request message for WriteUserEvent
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.discoveryengine_v1.types.UserEvent:
+                UserEvent captures all metadata
+                information Discovery Engine API needs
+                to know about how end users interact
+                with customers' website.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = user_event_service.WriteUserEventRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.write_user_event,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def collect_user_event(
+        self,
+        request: Optional[
+            Union[user_event_service.CollectUserEventRequest, dict]
+        ] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> httpbody_pb2.HttpBody:
+        r"""Writes a single user event from the browser. This
+        uses a GET request due to browser restrictions on
+        POST-ing to a third-party domain.
+        This method is used only by the Discovery Engine API
+        JavaScript pixel and Google Tag Manager. Users should
+        not call this method directly.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_collect_user_event(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CollectUserEventRequest( + parent="parent_value", + user_event="user_event_value", + ) + + # Make the request + response = await client.collect_user_event(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.CollectUserEventRequest, dict]]): + The request object. Request message for CollectUserEvent + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api.httpbody_pb2.HttpBody: + Message that represents an arbitrary HTTP body. It should only be used for + payload formats that can't be represented as JSON, + such as raw binary or an HTML page. + + This message can be used both in streaming and + non-streaming API methods in the request as well as + the response. + + It can be used as a top-level request field, which is + convenient if one wants to extract parameters from + either the URL or HTTP template into the request + fields and also want access to the raw HTTP body. + + Example: + + message GetResourceRequest { + // A unique request id. string request_id = 1; + + // The raw HTTP body is bound to this field. + google.api.HttpBody http_body = 2; + + } + + service ResourceService { + rpc GetResource(GetResourceRequest) + returns (google.api.HttpBody); + + rpc UpdateResource(google.api.HttpBody) + returns (google.protobuf.Empty); + + } + + Example with streaming methods: + + service CaldavService { + rpc GetCalendar(stream google.api.HttpBody) + returns (stream google.api.HttpBody); + + rpc UpdateCalendar(stream google.api.HttpBody) + returns (stream google.api.HttpBody); + + } + + Use of this type only changes how the request and + response bodies are handled, all other features will + continue to work unchanged. + + """ + # Create or coerce a protobuf request object. + request = user_event_service.CollectUserEventRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.collect_user_event, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def import_user_events( + self, + request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Bulk import of User events. Request processing might + be synchronous. Events that already exist are skipped. + Use this method for backfilling historical user events. + Operation.response is of type ImportResponse. Note that + it is possible for a subset of the items to be + successfully inserted. Operation.metadata is of type + ImportMetadata. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_import_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.user_events.event_type = "event_type_value" + inline_source.user_events.user_pseudo_id = "user_pseudo_id_value" + + request = discoveryengine_v1.ImportUserEventsRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ImportUserEventsRequest, dict]]): + The request object. Request message for the + ImportUserEvents request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.ImportUserEventsResponse` Response of the ImportUserEventsRequest. If the long running + operation was successful, then this message is + returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + request = import_config.ImportUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_user_events, + default_retry=retries.Retry( + initial=1.0, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
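
For orientation, a minimal end-to-end use of the import RPC above might look like the following synchronous sketch. The bucket path is a placeholder, and `data_schema="user_event"` is an assumption based on the API documentation rather than something this diff shows:

```python
# Hedged sketch: backfill historical events from Cloud Storage and block
# on the long-running operation.
from google.cloud import discoveryengine_v1

def backfill_user_events(parent: str):
    client = discoveryengine_v1.UserEventServiceClient()
    operation = client.import_user_events(
        request=discoveryengine_v1.ImportUserEventsRequest(
            parent=parent,
            gcs_source=discoveryengine_v1.GcsSource(
                input_uris=["gs://BUCKET/user_events.json"],
                data_schema="user_event",  # assumed schema name
            ),
        )
    )
    # result() polls until done; per the docstring, a subset of events may
    # still have been inserted even when the operation reports errors.
    return operation.result(timeout=600)
```
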
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + import_config.ImportUserEventsResponse, + metadata_type=import_config.ImportUserEventsMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("UserEventServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py new file mode 100644 index 000000000000..d2109b2a5c84 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py @@ -0,0 +1,937 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + common, + import_config, + user_event, + user_event_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, UserEventServiceTransport +from .transports.grpc import UserEventServiceGrpcTransport +from .transports.grpc_asyncio import UserEventServiceGrpcAsyncIOTransport +from .transports.rest import UserEventServiceRestTransport + + +class UserEventServiceClientMeta(type): + """Metaclass for the UserEventService client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[UserEventServiceTransport]]
+    _transport_registry["grpc"] = UserEventServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = UserEventServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = UserEventServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[UserEventServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class UserEventServiceClient(metaclass=UserEventServiceClientMeta):
+    """Service for ingesting end user actions on a website to
+    Discovery Engine API.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "discoveryengine.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            UserEventServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            UserEventServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> UserEventServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            UserEventServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
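
A quick sanity check of the endpoint rewriting above; the expected values follow directly from the `_get_default_mtls_endpoint` docstring:

```python
# Expected conversions per the docstring; non-Google hosts pass through.
assert (
    UserEventServiceClient._get_default_mtls_endpoint("discoveryengine.googleapis.com")
    == "discoveryengine.mtls.googleapis.com"
)
assert (
    UserEventServiceClient._get_default_mtls_endpoint(
        "discoveryengine.sandbox.googleapis.com"
    )
    == "discoveryengine.mtls.sandbox.googleapis.com"
)
assert UserEventServiceClient._get_default_mtls_endpoint("localhost") == "localhost"
```
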
+
+    @staticmethod
+    def data_store_path(
+        project: str,
+        location: str,
+        data_store: str,
+    ) -> str:
+        """Returns a fully-qualified data_store string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+        )
+
+    @staticmethod
+    def parse_data_store_path(path: str) -> Dict[str, str]:
+        """Parses a data_store path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def document_path(
+        project: str,
+        location: str,
+        data_store: str,
+        branch: str,
+        document: str,
+    ) -> str:
+        """Returns a fully-qualified document string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            branch=branch,
+            document=document,
+        )
+
+    @staticmethod
+    def parse_document_path(path: str) -> Dict[str, str]:
+        """Parses a document path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/branches/(?P<branch>.+?)/documents/(?P<document>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
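
The path helpers above are symmetric; a round trip looks like this:

```python
# Build and parse a data store resource name with the helpers above.
path = UserEventServiceClient.data_store_path("my-project", "global", "my-store")
assert path == "projects/my-project/locations/global/dataStores/my-store"
assert UserEventServiceClient.parse_data_store_path(path) == {
    "project": "my-project",
    "location": "global",
    "data_store": "my-store",
}
# Strings that don't match the template parse to an empty dict.
assert UserEventServiceClient.parse_data_store_path("not-a-resource-name") == {}
```
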
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, UserEventServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the user event service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, UserEventServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, UserEventServiceTransport): + # transport is a UserEventServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def write_user_event( + self, + request: Optional[Union[user_event_service.WriteUserEventRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> user_event.UserEvent: + r"""Writes a single user event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_write_user_event(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.WriteUserEventRequest( + parent="parent_value", + ) + + # Make the request + response = client.write_user_event(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.WriteUserEventRequest, dict]): + The request object. Request message for WriteUserEvent + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.UserEvent: + UserEvent captures all metadata + information Discovery Engine API needs + to know about how end users interact + with customers' website. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a user_event_service.WriteUserEventRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, user_event_service.WriteUserEventRequest): + request = user_event_service.WriteUserEventRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.write_user_event] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def collect_user_event(
+        self,
+        request: Optional[
+            Union[user_event_service.CollectUserEventRequest, dict]
+        ] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> httpbody_pb2.HttpBody:
+        r"""Writes a single user event from the browser. This
+        uses a GET request due to browser restrictions on
+        POST-ing to a third-party domain.
+        This method is used only by the Discovery Engine API
+        JavaScript pixel and Google Tag Manager. Users should
+        not call this method directly.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1
+
+            def sample_collect_user_event():
+                # Create a client
+                client = discoveryengine_v1.UserEventServiceClient()
+
+                # Initialize request argument(s)
+                request = discoveryengine_v1.CollectUserEventRequest(
+                    parent="parent_value",
+                    user_event="user_event_value",
+                )
+
+                # Make the request
+                response = client.collect_user_event(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.discoveryengine_v1.types.CollectUserEventRequest, dict]):
+                The request object. Request message for CollectUserEvent
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api.httpbody_pb2.HttpBody:
+                Message that represents an arbitrary HTTP body. It should only be used for
+                payload formats that can't be represented as JSON,
+                such as raw binary or an HTML page.
+
+                This message can be used both in streaming and
+                non-streaming API methods in the request as well as
+                the response.
+
+                It can be used as a top-level request field, which is
+                convenient if one wants to extract parameters from
+                either the URL or HTTP template into the request
+                fields and also want access to the raw HTTP body.
+
+                Example:
+
+                    message GetResourceRequest {
+                        // A unique request id. string request_id = 1;
+
+                        // The raw HTTP body is bound to this field.
+                        google.api.HttpBody http_body = 2;
+
+                    }
+
+                    service ResourceService {
+                        rpc GetResource(GetResourceRequest)
+                            returns (google.api.HttpBody);
+
+                        rpc UpdateResource(google.api.HttpBody)
+                            returns (google.protobuf.Empty);
+
+                    }
+
+                Example with streaming methods:
+
+                    service CaldavService {
+                        rpc GetCalendar(stream google.api.HttpBody)
+                            returns (stream google.api.HttpBody);
+
+                        rpc UpdateCalendar(stream google.api.HttpBody)
+                            returns (stream google.api.HttpBody);
+
+                    }
+
+                Use of this type only changes how the request and
+                response bodies are handled, all other features will
+                continue to work unchanged.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a user_event_service.CollectUserEventRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance(request, user_event_service.CollectUserEventRequest): + request = user_event_service.CollectUserEventRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.collect_user_event] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def import_user_events( + self, + request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Bulk import of User events. Request processing might + be synchronous. Events that already exist are skipped. + Use this method for backfilling historical user events. + Operation.response is of type ImportResponse. Note that + it is possible for a subset of the items to be + successfully inserted. Operation.metadata is of type + ImportMetadata. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_import_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.user_events.event_type = "event_type_value" + inline_source.user_events.user_pseudo_id = "user_pseudo_id_value" + + request = discoveryengine_v1.ImportUserEventsRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ImportUserEventsRequest, dict]): + The request object. Request message for the + ImportUserEvents request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.ImportUserEventsResponse` Response of the ImportUserEventsRequest. If the long running + operation was successful, then this message is + returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a import_config.ImportUserEventsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, import_config.ImportUserEventsRequest): + request = import_config.ImportUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_user_events] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + import_config.ImportUserEventsResponse, + metadata_type=import_config.ImportUserEventsMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "UserEventServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. 
Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("UserEventServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/__init__.py new file mode 100644 index 000000000000..15fb69bda2c8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import UserEventServiceTransport +from .grpc import UserEventServiceGrpcTransport +from .grpc_asyncio import UserEventServiceGrpcAsyncIOTransport +from .rest import UserEventServiceRestInterceptor, UserEventServiceRestTransport + +# Compile a registry of transports. 
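+# (Editor's sketch, not part of the generated module: the service clients
+# resolve a ``transport="grpc"`` / ``"grpc_asyncio"`` / ``"rest"`` argument
+# against this registry, roughly as
+#     transport_cls = _transport_registry["rest"]
+# so the keys below must match the transport names the clients accept.)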
+_transport_registry = OrderedDict() # type: Dict[str, Type[UserEventServiceTransport]] +_transport_registry["grpc"] = UserEventServiceGrpcTransport +_transport_registry["grpc_asyncio"] = UserEventServiceGrpcAsyncIOTransport +_transport_registry["rest"] = UserEventServiceRestTransport + +__all__ = ( + "UserEventServiceTransport", + "UserEventServiceGrpcTransport", + "UserEventServiceGrpcAsyncIOTransport", + "UserEventServiceRestTransport", + "UserEventServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py new file mode 100644 index 000000000000..9fba41d4e707 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py @@ -0,0 +1,224 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.api import httpbody_pb2 # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import ( + import_config, + user_event, + user_event_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class UserEventServiceTransport(abc.ABC): + """Abstract transport class for UserEventService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.write_user_event: gapic_v1.method.wrap_method( + self.write_user_event, + default_timeout=None, + client_info=client_info, + ), + self.collect_user_event: gapic_v1.method.wrap_method( + self.collect_user_event, + default_timeout=None, + client_info=client_info, + ), + self.import_user_events: gapic_v1.method.wrap_method( + self.import_user_events, + default_retry=retries.Retry( + initial=1.0, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
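+
+        As a sketch (relying on the ``__enter__``/``__exit__`` support shown
+        on the client elsewhere in this PR), the safest pattern is to let the
+        client own the transport:
+
+        .. code-block:: python
+
+            with UserEventServiceClient() as client:
+                ...  # make calls; the transport is closed on exit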
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def write_user_event( + self, + ) -> Callable[ + [user_event_service.WriteUserEventRequest], + Union[user_event.UserEvent, Awaitable[user_event.UserEvent]], + ]: + raise NotImplementedError() + + @property + def collect_user_event( + self, + ) -> Callable[ + [user_event_service.CollectUserEventRequest], + Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + ]: + raise NotImplementedError() + + @property + def import_user_events( + self, + ) -> Callable[ + [import_config.ImportUserEventsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("UserEventServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py new file mode 100644 index 000000000000..9ebe20b7c63a --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + import_config, + user_event, + user_event_service, +) + +from .base import DEFAULT_CLIENT_INFO, UserEventServiceTransport + + +class UserEventServiceGrpcTransport(UserEventServiceTransport): + """gRPC backend transport for UserEventService. + + Service for ingesting end user actions on a website to + Discovery Engine API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
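+
+        For illustration only (a sketch; ``create_channel`` is the classmethod
+        defined below), the transport can wrap an explicitly created channel,
+        in which case the credentials arguments above are ignored:
+
+        .. code-block:: python
+
+            channel = UserEventServiceGrpcTransport.create_channel(
+                "discoveryengine.googleapis.com",
+            )
+            transport = UserEventServiceGrpcTransport(channel=channel)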
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def write_user_event(
+        self,
+    ) -> Callable[[user_event_service.WriteUserEventRequest], user_event.UserEvent]:
+        r"""Return a callable for the write user event method over gRPC.
+
+        Writes a single user event.
+
+        Returns:
+            Callable[[~.WriteUserEventRequest],
+                    ~.UserEvent]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "write_user_event" not in self._stubs:
+            self._stubs["write_user_event"] = self.grpc_channel.unary_unary(
+                "/google.cloud.discoveryengine.v1.UserEventService/WriteUserEvent",
+                request_serializer=user_event_service.WriteUserEventRequest.serialize,
+                response_deserializer=user_event.UserEvent.deserialize,
+            )
+        return self._stubs["write_user_event"]
+
+    @property
+    def collect_user_event(
+        self,
+    ) -> Callable[[user_event_service.CollectUserEventRequest], httpbody_pb2.HttpBody]:
+        r"""Return a callable for the collect user event method over gRPC.
+
+        Writes a single user event from the browser. This
+        uses a GET request due to browser restrictions on
+        POST-ing to a third-party domain.
+        This method is used only by the Discovery Engine API
+        JavaScript pixel and Google Tag Manager. Users should
+        not call this method directly.
+
+        Returns:
+            Callable[[~.CollectUserEventRequest],
+                    ~.HttpBody]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "collect_user_event" not in self._stubs:
+            self._stubs["collect_user_event"] = self.grpc_channel.unary_unary(
+                "/google.cloud.discoveryengine.v1.UserEventService/CollectUserEvent",
+                request_serializer=user_event_service.CollectUserEventRequest.serialize,
+                response_deserializer=httpbody_pb2.HttpBody.FromString,
+            )
+        return self._stubs["collect_user_event"]
+
+    @property
+    def import_user_events(
+        self,
+    ) -> Callable[[import_config.ImportUserEventsRequest], operations_pb2.Operation]:
+        r"""Return a callable for the import user events method over gRPC.
+
+        Bulk import of User events. Request processing might
+        be synchronous. Events that already exist are skipped.
+        Use this method for backfilling historical user events.
+ Operation.response is of type ImportResponse. Note that + it is possible for a subset of the items to be + successfully inserted. Operation.metadata is of type + ImportMetadata. + + Returns: + Callable[[~.ImportUserEventsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_user_events" not in self._stubs: + self._stubs["import_user_events"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.UserEventService/ImportUserEvents", + request_serializer=import_config.ImportUserEventsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_user_events"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("UserEventServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3e25b5c0161d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + import_config, + user_event, + user_event_service, +) + +from .base import DEFAULT_CLIENT_INFO, UserEventServiceTransport +from .grpc import UserEventServiceGrpcTransport + + +class UserEventServiceGrpcAsyncIOTransport(UserEventServiceTransport): + """gRPC AsyncIO backend transport for UserEventService. + + Service for ingesting end user actions on a website to + Discovery Engine API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def write_user_event( + self, + ) -> Callable[ + [user_event_service.WriteUserEventRequest], Awaitable[user_event.UserEvent] + ]: + r"""Return a callable for the write user event method over gRPC. + + Writes a single user event. + + Returns: + Callable[[~.WriteUserEventRequest], + Awaitable[~.UserEvent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "write_user_event" not in self._stubs:
+            self._stubs["write_user_event"] = self.grpc_channel.unary_unary(
+                "/google.cloud.discoveryengine.v1.UserEventService/WriteUserEvent",
+                request_serializer=user_event_service.WriteUserEventRequest.serialize,
+                response_deserializer=user_event.UserEvent.deserialize,
+            )
+        return self._stubs["write_user_event"]
+
+    @property
+    def collect_user_event(
+        self,
+    ) -> Callable[
+        [user_event_service.CollectUserEventRequest], Awaitable[httpbody_pb2.HttpBody]
+    ]:
+        r"""Return a callable for the collect user event method over gRPC.
+
+        Writes a single user event from the browser. This
+        uses a GET request due to browser restrictions on
+        POST-ing to a third-party domain.
+        This method is used only by the Discovery Engine API
+        JavaScript pixel and Google Tag Manager. Users should
+        not call this method directly.
+
+        Returns:
+            Callable[[~.CollectUserEventRequest],
+                    Awaitable[~.HttpBody]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "collect_user_event" not in self._stubs:
+            self._stubs["collect_user_event"] = self.grpc_channel.unary_unary(
+                "/google.cloud.discoveryengine.v1.UserEventService/CollectUserEvent",
+                request_serializer=user_event_service.CollectUserEventRequest.serialize,
+                response_deserializer=httpbody_pb2.HttpBody.FromString,
+            )
+        return self._stubs["collect_user_event"]
+
+    @property
+    def import_user_events(
+        self,
+    ) -> Callable[
+        [import_config.ImportUserEventsRequest], Awaitable[operations_pb2.Operation]
+    ]:
+        r"""Return a callable for the import user events method over gRPC.
+
+        Bulk import of User events. Request processing might
+        be synchronous. Events that already exist are skipped.
+        Use this method for backfilling historical user events.
+        Operation.response is of type ImportResponse. Note that
+        it is possible for a subset of the items to be
+        successfully inserted. Operation.metadata is of type
+        ImportMetadata.
+
+        Returns:
+            Callable[[~.ImportUserEventsRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "import_user_events" not in self._stubs:
+            self._stubs["import_user_events"] = self.grpc_channel.unary_unary(
+                "/google.cloud.discoveryengine.v1.UserEventService/ImportUserEvents",
+                request_serializer=import_config.ImportUserEventsRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["import_user_events"]
+
+    def close(self):
+        return self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("UserEventServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py new file mode 100644 index 000000000000..4fcc5ececb68 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py @@ -0,0 +1,1045 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.api import httpbody_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + import_config, + user_event, + user_event_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import UserEventServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class UserEventServiceRestInterceptor: + """Interceptor for UserEventService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the UserEventServiceRestTransport. + + .. code-block:: python + class MyCustomUserEventServiceInterceptor(UserEventServiceRestInterceptor): + def pre_collect_user_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_collect_user_event(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_user_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_user_events(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_write_user_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_write_user_event(self, response): + logging.log(f"Received response: {response}") + return response + + transport = UserEventServiceRestTransport(interceptor=MyCustomUserEventServiceInterceptor()) + client = UserEventServiceClient(transport=transport) + + + """ + + def pre_collect_user_event( + self, + request: user_event_service.CollectUserEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[user_event_service.CollectUserEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for collect_user_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. 
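+
+        For example (a sketch only; the header name is invented), a subclass
+        could attach an extra metadata entry:
+
+        .. code-block:: python
+
+            def pre_collect_user_event(self, request, metadata):
+                return request, tuple(metadata) + (("x-example-header", "1"),)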
+ """ + return request, metadata + + def post_collect_user_event( + self, response: httpbody_pb2.HttpBody + ) -> httpbody_pb2.HttpBody: + """Post-rpc interceptor for collect_user_event + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. + """ + return response + + def pre_import_user_events( + self, + request: import_config.ImportUserEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[import_config.ImportUserEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_user_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_import_user_events( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_user_events + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. + """ + return response + + def pre_write_user_event( + self, + request: user_event_service.WriteUserEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[user_event_service.WriteUserEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for write_user_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_write_user_event( + self, response: user_event.UserEvent + ) -> user_event.UserEvent: + """Post-rpc interceptor for write_user_event + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class UserEventServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: UserEventServiceRestInterceptor + + +class UserEventServiceRestTransport(UserEventServiceTransport): + """REST backend transport for UserEventService. + + Service for ingesting end user actions on a website to + Discovery Engine API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[UserEventServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or UserEventServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations",
+                    },
+                    {
+                        "method": "get",
+                        "uri":
"/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CollectUserEvent(UserEventServiceRestStub): + def __hash__(self): + return hash("CollectUserEvent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "userEvent": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: user_event_service.CollectUserEventRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> httpbody_pb2.HttpBody: + r"""Call the collect user event method over HTTP. + + Args: + request (~.user_event_service.CollectUserEventRequest): + The request object. Request message for CollectUserEvent + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.httpbody_pb2.HttpBody: + Message that represents an arbitrary HTTP body. It + should only be used for payload formats that can't be + represented as JSON, such as raw binary or an HTML page. + + This message can be used both in streaming and + non-streaming API methods in the request as well as the + response. + + It can be used as a top-level request field, which is + convenient if one wants to extract parameters from + either the URL or HTTP template into the request fields + and also want access to the raw HTTP body. + + Example: + + :: + + message GetResourceRequest { + // A unique request id. + string request_id = 1; + + // The raw HTTP body is bound to this field. + google.api.HttpBody http_body = 2; + + } + + service ResourceService { + rpc GetResource(GetResourceRequest) + returns (google.api.HttpBody); + rpc UpdateResource(google.api.HttpBody) + returns (google.protobuf.Empty); + + } + + Example with streaming methods: + + :: + + service CaldavService { + rpc GetCalendar(stream google.api.HttpBody) + returns (stream google.api.HttpBody); + rpc UpdateCalendar(stream google.api.HttpBody) + returns (stream google.api.HttpBody); + + } + + Use of this type only changes how the request and + response bodies are handled, all other features will + continue to work unchanged. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/userEvents:collect", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:collect", + }, + ] + request, metadata = self._interceptor.pre_collect_user_event( + request, metadata + ) + pb_request = user_event_service.CollectUserEventRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = httpbody_pb2.HttpBody() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_collect_user_event(resp) + return resp + + class _ImportUserEvents(UserEventServiceRestStub): + def __hash__(self): + return hash("ImportUserEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: import_config.ImportUserEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import user events method over HTTP. + + Args: + request (~.import_config.ImportUserEventsRequest): + The request object. Request message for the + ImportUserEvents request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/userEvents:import", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_user_events( + request, metadata + ) + pb_request = import_config.ImportUserEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_user_events(resp) + return resp + + class _WriteUserEvent(UserEventServiceRestStub): + def __hash__(self): + return hash("WriteUserEvent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: user_event_service.WriteUserEventRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> user_event.UserEvent: + r"""Call the write user event method over HTTP. + + Args: + request (~.user_event_service.WriteUserEventRequest): + The request object. Request message for WriteUserEvent + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.user_event.UserEvent: + UserEvent captures all metadata + information Discovery Engine API needs + to know about how end users interact + with customers' website. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/userEvents:write", + "body": "user_event", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:write", + "body": "user_event", + }, + ] + request, metadata = self._interceptor.pre_write_user_event( + request, metadata + ) + pb_request = user_event_service.WriteUserEventRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = user_event.UserEvent() + pb_resp = user_event.UserEvent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_write_user_event(resp) + return resp + + @property + def collect_user_event( + self, + ) -> Callable[[user_event_service.CollectUserEventRequest], httpbody_pb2.HttpBody]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CollectUserEvent(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_user_events( + self, + ) -> Callable[[import_config.ImportUserEventsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportUserEvents(self._session, self._host, self._interceptor) # type: ignore + + @property + def write_user_event( + self, + ) -> Callable[[user_event_service.WriteUserEventRequest], user_event.UserEvent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._WriteUserEvent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(UserEventServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(UserEventServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("UserEventServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py new file mode 100644 index 000000000000..bd6e01085c40 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
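The transport implemented above is selected through the standard GAPIC transport switch when the handwritten client is constructed; a minimal hedged sketch:

    from google.cloud import discoveryengine_v1

    # Route all calls through UserEventServiceRestTransport (REST/JSON)
    # instead of the default gRPC transport.
    client = discoveryengine_v1.UserEventServiceClient(transport="rest")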
+# +from .common import CustomAttribute, UserInfo +from .completion_service import CompleteQueryRequest, CompleteQueryResponse +from .document import Document +from .document_service import ( + CreateDocumentRequest, + DeleteDocumentRequest, + GetDocumentRequest, + ListDocumentsRequest, + ListDocumentsResponse, + UpdateDocumentRequest, +) +from .import_config import ( + BigQuerySource, + GcsSource, + ImportDocumentsMetadata, + ImportDocumentsRequest, + ImportDocumentsResponse, + ImportErrorConfig, + ImportUserEventsMetadata, + ImportUserEventsRequest, + ImportUserEventsResponse, +) +from .purge_config import ( + PurgeDocumentsMetadata, + PurgeDocumentsRequest, + PurgeDocumentsResponse, +) +from .schema import Schema +from .schema_service import ( + CreateSchemaMetadata, + CreateSchemaRequest, + DeleteSchemaMetadata, + DeleteSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + UpdateSchemaMetadata, + UpdateSchemaRequest, +) +from .search_service import SearchRequest, SearchResponse +from .user_event import ( + CompletionInfo, + DocumentInfo, + MediaInfo, + PageInfo, + PanelInfo, + SearchInfo, + TransactionInfo, + UserEvent, +) +from .user_event_service import CollectUserEventRequest, WriteUserEventRequest + +__all__ = ( + "CustomAttribute", + "UserInfo", + "CompleteQueryRequest", + "CompleteQueryResponse", + "Document", + "CreateDocumentRequest", + "DeleteDocumentRequest", + "GetDocumentRequest", + "ListDocumentsRequest", + "ListDocumentsResponse", + "UpdateDocumentRequest", + "BigQuerySource", + "GcsSource", + "ImportDocumentsMetadata", + "ImportDocumentsRequest", + "ImportDocumentsResponse", + "ImportErrorConfig", + "ImportUserEventsMetadata", + "ImportUserEventsRequest", + "ImportUserEventsResponse", + "PurgeDocumentsMetadata", + "PurgeDocumentsRequest", + "PurgeDocumentsResponse", + "Schema", + "CreateSchemaMetadata", + "CreateSchemaRequest", + "DeleteSchemaMetadata", + "DeleteSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "UpdateSchemaMetadata", + "UpdateSchemaRequest", + "SearchRequest", + "SearchResponse", + "CompletionInfo", + "DocumentInfo", + "MediaInfo", + "PageInfo", + "PanelInfo", + "SearchInfo", + "TransactionInfo", + "UserEvent", + "CollectUserEventRequest", + "WriteUserEventRequest", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py new file mode 100644 index 000000000000..8592a0cdd8be --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
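The names collected in the __all__ tuple above form the public import surface of the types package; a hedged sketch using CustomAttribute, whose fields are documented in the module that follows:

    from google.cloud.discoveryengine_v1.types import CustomAttribute

    # Exactly one of `text` or `numbers` may be set per attribute.
    color = CustomAttribute(text=["yellow", "green"])
    lengths_cm = CustomAttribute(numbers=[2.3, 15.4])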
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CustomAttribute", + "UserInfo", + }, +) + + +class CustomAttribute(proto.Message): + r"""A custom attribute that is not explicitly modeled in a resource, + e.g. [UserEvent][google.cloud.discoveryengine.v1.UserEvent]. + + Attributes: + text (MutableSequence[str]): + The textual values of this custom attribute. For example, + ``["yellow", "green"]`` when the key is "color". + + Empty string is not allowed. Otherwise, an + ``INVALID_ARGUMENT`` error is returned. + + Exactly one of + [CustomAttribute.text][google.cloud.discoveryengine.v1.CustomAttribute.text] + or + [CustomAttribute.numbers][google.cloud.discoveryengine.v1.CustomAttribute.numbers] + should be set. Otherwise, an ``INVALID_ARGUMENT`` error is + returned. + numbers (MutableSequence[float]): + The numerical values of this custom attribute. For example, + ``[2.3, 15.4]`` when the key is "lengths_cm". + + Exactly one of + [CustomAttribute.text][google.cloud.discoveryengine.v1.CustomAttribute.text] + or + [CustomAttribute.numbers][google.cloud.discoveryengine.v1.CustomAttribute.numbers] + should be set. Otherwise, an ``INVALID_ARGUMENT`` error is + returned. + """ + + text: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + numbers: MutableSequence[float] = proto.RepeatedField( + proto.DOUBLE, + number=2, + ) + + +class UserInfo(proto.Message): + r"""Information of an end user. + + Attributes: + user_id (str): + Highly recommended for logged-in users. Unique identifier + for logged-in user, such as a user name. Don't set for + anonymous users. + + Always use a hashed value for this ID. + + Don't set the field to the same fixed ID for different + users. This mixes the event history of those users together, + which results in degraded model quality. + + The field must be a UTF-8 encoded string with a length limit + of 128 characters. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. + user_agent (str): + User agent as included in the HTTP header. Required for + getting [SearchResponse.sponsored_results][]. + + The field must be a UTF-8 encoded string with a length limit + of 1,000 characters. Otherwise, an ``INVALID_ARGUMENT`` + error is returned. + + This should not be set when using the client side event + reporting with GTM or JavaScript tag in + [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent] + or if + [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request] + is set. + """ + + user_id: str = proto.Field( + proto.STRING, + number=1, + ) + user_agent: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/completion_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/completion_service.py new file mode 100644 index 000000000000..c52264e73392 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/completion_service.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CompleteQueryRequest", + "CompleteQueryResponse", + }, +) + + +class CompleteQueryRequest(proto.Message): + r"""Request message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery] + method. + + Attributes: + data_store (str): + Required. The parent data store resource name for which the + completion is performed, such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + query (str): + Required. The typeahead input used to fetch + suggestions. Maximum length is 128 characters. + query_model (str): + Selects data model of query suggestions for serving. + Currently supported values: + + - ``document`` - Using suggestions generated from + user-imported documents. + - ``search-history`` - Using suggestions generated from the + past history of + [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search] + API calls. Do not use it when there is no traffic for + Search API. + - ``user-event`` - Using suggestions generated from + user-imported search events. + + Default values: + + - ``document`` is the default model for regular dataStores. + - ``search-history`` is the default model for + [IndustryVertical.SITE_SEARCH][google.cloud.discoveryengine.v1.IndustryVertical.SITE_SEARCH] + dataStores. + user_pseudo_id (str): + A unique identifier for tracking visitors. For example, this + could be implemented with an HTTP cookie, which should be + able to uniquely identify a visitor on a single device. This + unique identifier should not change if the visitor logs in + or out of the website. + + This field should NOT have a fixed value such as + ``unknown_visitor``. + + This should be the same identifier as + [UserEvent.user_pseudo_id][google.cloud.discoveryengine.v1.UserEvent.user_pseudo_id] + and + [SearchRequest.user_pseudo_id][google.cloud.discoveryengine.v1.SearchRequest.user_pseudo_id]. + + The field must be a UTF-8 encoded string with a length limit + of 128 characters. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. + """ + + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + query_model: str = proto.Field( + proto.STRING, + number=3, + ) + user_pseudo_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CompleteQueryResponse(proto.Message): + r"""Response message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery] + method. + + Attributes: + query_suggestions (MutableSequence[google.cloud.discoveryengine_v1.types.CompleteQueryResponse.QuerySuggestion]): + Results of the matched query suggestions. The + result list is ordered and the first result is a + top suggestion. + """ + + class QuerySuggestion(proto.Message): + r"""Suggestions as search queries. 
+ + Attributes: + suggestion (str): + The suggestion for the query. + """ + + suggestion: str = proto.Field( + proto.STRING, + number=1, + ) + + query_suggestions: MutableSequence[QuerySuggestion] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=QuerySuggestion, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py new file mode 100644 index 000000000000..6d1958b97974 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "Document", + }, +) + + +class Document(proto.Message): + r"""Document captures all raw metadata information of items to be + recommended or searched. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + struct_data (google.protobuf.struct_pb2.Struct): + The structured JSON data for the document. It should conform + to the registered + [Schema.schema][google.cloud.discoveryengine.v1.Schema.schema] + or an ``INVALID_ARGUMENT`` error is thrown. + + This field is a member of `oneof`_ ``data``. + json_data (str): + The JSON string representation of the document. It should + conform to the registered + [Schema.schema][google.cloud.discoveryengine.v1.Schema.schema] + or an ``INVALID_ARGUMENT`` error is thrown. + + This field is a member of `oneof`_ ``data``. + name (str): + Immutable. The full resource name of the document. Format: + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document_id}``. + + This field must be a UTF-8 encoded string with a length + limit of 1024 characters. + id (str): + Immutable. The identifier of the document. + + Id should conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. + schema_id (str): + The identifier of the schema located in the + same data store. + content (google.cloud.discoveryengine_v1.types.Document.Content): + The unstructured data linked to this document. Content must + be set if this document is under a ``CONTENT_REQUIRED`` data + store. + parent_document_id (str): + The identifier of the parent document. Currently supports at + most two level document hierarchy. 
+ + Id should conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. + derived_struct_data (google.protobuf.struct_pb2.Struct): + Output only. This field is OUTPUT_ONLY. It contains derived + data that are not in the original input document. + """ + + class Content(proto.Message): + r"""Unstructured data linked to this document. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw_bytes (bytes): + The content represented as a stream of bytes. The maximum + length is 1,000,000 bytes (1 MB / ~0.95 MiB). + + Note: As with all ``bytes`` fields, this field is + represented as pure binary in Protocol Buffers and + base64-encoded string in JSON. For example, + ``abc123!?$*&()'-=@~`` should be represented as + ``YWJjMTIzIT8kKiYoKSctPUB+`` in JSON. See + https://developers.google.com/protocol-buffers/docs/proto3#json. + + This field is a member of `oneof`_ ``content``. + uri (str): + The URI of the content. Only Cloud Storage URIs (e.g. + ``gs://bucket-name/path/to/file``) are supported. The + maximum file size is 100 MB. + + This field is a member of `oneof`_ ``content``. + mime_type (str): + The MIME type of the content. Supported types: + + - ``application/pdf`` (PDF) + - ``text/html`` (HTML) + + See + https://www.iana.org/assignments/media-types/media-types.xhtml. + """ + + raw_bytes: bytes = proto.Field( + proto.BYTES, + number=2, + oneof="content", + ) + uri: str = proto.Field( + proto.STRING, + number=3, + oneof="content", + ) + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=4, + oneof="data", + message=struct_pb2.Struct, + ) + json_data: str = proto.Field( + proto.STRING, + number=5, + oneof="data", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + schema_id: str = proto.Field( + proto.STRING, + number=3, + ) + content: Content = proto.Field( + proto.MESSAGE, + number=10, + message=Content, + ) + parent_document_id: str = proto.Field( + proto.STRING, + number=7, + ) + derived_struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py new file mode 100644 index 000000000000..5bb857d3d36e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
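Before the request messages below, a hedged sketch of constructing the Document they carry, using the ``data`` oneof and the nested Content type documented above (the ID, JSON payload, and URI are placeholders):

    from google.cloud import discoveryengine_v1

    doc = discoveryengine_v1.Document(
        id="doc-0001",
        json_data='{"title": "FAQ"}',  # `data` oneof: json_data or struct_data
        content=discoveryengine_v1.Document.Content(
            uri="gs://bucket-name/path/to/file.pdf",  # Cloud Storage URIs only
            mime_type="application/pdf",
        ),
    )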
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import document as gcd_document + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "GetDocumentRequest", + "ListDocumentsRequest", + "ListDocumentsResponse", + "CreateDocumentRequest", + "UpdateDocumentRequest", + "DeleteDocumentRequest", + }, +) + + +class GetDocumentRequest(proto.Message): + r"""Request message for + [DocumentService.GetDocument][google.cloud.discoveryengine.v1.DocumentService.GetDocument] + method. + + Attributes: + name (str): + Required. Full resource name of + [Document][google.cloud.discoveryengine.v1.Document], such + as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + + If the caller does not have permission to access the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + If the requested + [Document][google.cloud.discoveryengine.v1.Document] does + not exist, a ``NOT_FOUND`` error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDocumentsRequest(proto.Message): + r"""Request message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + Use ``default_branch`` as the branch ID, to list documents + under the default branch. + + If the caller does not have permission to list + [Documents][]s under this branch, regardless of whether or + not this branch exists, a ``PERMISSION_DENIED`` error is + returned. + page_size (int): + Maximum number of + [Document][google.cloud.discoveryengine.v1.Document]s to + return. If unspecified, defaults to 100. The maximum allowed + value is 1000. Values above 1000 will be coerced to 1000. + + If this field is negative, an ``INVALID_ARGUMENT`` error is + returned. + page_token (str): + A page token + [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], + received from a previous + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + must match the call that provided the page token. Otherwise, + an ``INVALID_ARGUMENT`` error is returned. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDocumentsResponse(proto.Message): + r"""Response message for + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] + method. + + Attributes: + documents (MutableSequence[google.cloud.discoveryengine_v1.types.Document]): + The [Document][google.cloud.discoveryengine.v1.Document]s. + next_page_token (str): + A token that can be sent as + [ListDocumentsRequest.page_token][google.cloud.discoveryengine.v1.ListDocumentsRequest.page_token] + to retrieve the next page. 
If this field is omitted, there + are no subsequent pages. + """ + + @property + def raw_page(self): + return self + + documents: MutableSequence[gcd_document.Document] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_document.Document, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDocumentRequest(proto.Message): + r"""Request message for + [DocumentService.CreateDocument][google.cloud.discoveryengine.v1.DocumentService.CreateDocument] + method. + + Attributes: + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + document (google.cloud.discoveryengine_v1.types.Document): + Required. The + [Document][google.cloud.discoveryengine.v1.Document] to + create. + document_id (str): + Required. The ID to use for the + [Document][google.cloud.discoveryengine.v1.Document], which + will become the final component of the + [Document.name][google.cloud.discoveryengine.v1.Document.name]. + + If the caller does not have permission to create the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + This field must be unique among all + [Document][google.cloud.discoveryengine.v1.Document]s with + the same + [parent][google.cloud.discoveryengine.v1.CreateDocumentRequest.parent]. + Otherwise, an ``ALREADY_EXISTS`` error is returned. + + This field must conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. Otherwise, an + ``INVALID_ARGUMENT`` error is returned. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + document: gcd_document.Document = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_document.Document, + ) + document_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateDocumentRequest(proto.Message): + r"""Request message for + [DocumentService.UpdateDocument][google.cloud.discoveryengine.v1.DocumentService.UpdateDocument] + method. + + Attributes: + document (google.cloud.discoveryengine_v1.types.Document): + Required. The document to update/create. + + If the caller does not have permission to update the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + If the [Document][google.cloud.discoveryengine.v1.Document] + to update does not exist and + [allow_missing][google.cloud.discoveryengine.v1.UpdateDocumentRequest.allow_missing] + is not set, a ``NOT_FOUND`` error is returned. + allow_missing (bool): + If set to true, and the + [Document][google.cloud.discoveryengine.v1.Document] is not + found, a new + [Document][google.cloud.discoveryengine.v1.Document] will be + created. + """ + + document: gcd_document.Document = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_document.Document, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class DeleteDocumentRequest(proto.Message): + r"""Request message for + [DocumentService.DeleteDocument][google.cloud.discoveryengine.v1.DocumentService.DeleteDocument] + method. + + Attributes: + name (str): + Required. Full resource name of + [Document][google.cloud.discoveryengine.v1.Document], such + as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. 
+ + If the caller does not have permission to delete the + [Document][google.cloud.discoveryengine.v1.Document], + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. + + If the [Document][google.cloud.discoveryengine.v1.Document] + to delete does not exist, a ``NOT_FOUND`` error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py new file mode 100644 index 000000000000..e13ed4673f23 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py @@ -0,0 +1,602 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import document, user_event + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "GcsSource", + "BigQuerySource", + "ImportErrorConfig", + "ImportUserEventsRequest", + "ImportUserEventsResponse", + "ImportUserEventsMetadata", + "ImportDocumentsMetadata", + "ImportDocumentsRequest", + "ImportDocumentsResponse", + }, +) + + +class GcsSource(proto.Message): + r"""Cloud Storage location for input content. + + Attributes: + input_uris (MutableSequence[str]): + Required. Cloud Storage URIs to input files. URI can be up + to 2000 characters long. URIs can match the full object path + (for example, ``gs://bucket/directory/object.json``) or a + pattern matching one or more files, such as + ``gs://bucket/directory/*.json``. + + A request can contain at most 100 files (or 100,000 files if + ``data_schema`` is ``content``). Each file can be up to 2 GB + (or 100 MB if ``data_schema`` is ``content``). + data_schema (str): + The schema to use when parsing the data from the source. + + Supported values for document imports: + + - ``document`` (default): One JSON + [Document][google.cloud.discoveryengine.v1.Document] per + line. Each document must have a valid + [Document.id][google.cloud.discoveryengine.v1.Document.id]. + - ``content``: Unstructured data (e.g. PDF, HTML). Each + file matched by ``input_uris`` will become a document, + with the ID set to the first 128 bits of SHA256(URI) + encoded as a hex string. + - ``custom``: One custom data JSON per row in arbitrary + format that conforms the defined + [Schema][google.cloud.discoveryengine.v1.Schema] of the + data store. This can only be used by the GENERIC Data + Store vertical. 
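A hedged illustration of a source message matching the ``document`` schema described above, and of attaching it to the ImportDocumentsRequest defined later in this module (resource names are placeholders) ::

    gcs = GcsSource(
        input_uris=["gs://bucket/directory/*.json"],
        data_schema="document",
    )
    request = ImportDocumentsRequest(
        parent="projects/p/locations/global/dataStores/d/branches/default_branch",
        gcs_source=gcs,
        reconciliation_mode=ImportDocumentsRequest.ReconciliationMode.INCREMENTAL,
    )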
+ + Supported values for user event imports: + + - ``user_event`` (default): One JSON + [UserEvent][google.cloud.discoveryengine.v1.UserEvent] + per line. + """ + + input_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + data_schema: str = proto.Field( + proto.STRING, + number=2, + ) + + +class BigQuerySource(proto.Message): + r"""BigQuery source to import data from. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + partition_date (google.type.date_pb2.Date): + BigQuery time partitioned table's \_PARTITIONDATE in + YYYY-MM-DD format. + + This field is a member of `oneof`_ ``partition``. + project_id (str): + The project ID (can be project # or ID) that + the BigQuery source is in with a length limit of + 128 characters. If not specified, inherits the + project ID from the parent request. + dataset_id (str): + Required. The BigQuery data set to copy the + data from with a length limit of 1,024 + characters. + table_id (str): + Required. The BigQuery table to copy the data + from with a length limit of 1,024 characters. + gcs_staging_dir (str): + Intermediate Cloud Storage directory used for + the import with a length limit of 2,000 + characters. Can be specified if one wants to + have the BigQuery export to a specific Cloud + Storage directory. + data_schema (str): + The schema to use when parsing the data from the source. + + Supported values for user event imports: + + - ``user_event`` (default): One + [UserEvent][google.cloud.discoveryengine.v1.UserEvent] + per row. + + Supported values for document imports: + + - ``document`` (default): One + [Document][google.cloud.discoveryengine.v1.Document] + format per row. Each document must have a valid + [Document.id][google.cloud.discoveryengine.v1.Document.id] + and one of + [Document.json_data][google.cloud.discoveryengine.v1.Document.json_data] + or + [Document.struct_data][google.cloud.discoveryengine.v1.Document.struct_data]. + - ``custom``: One custom data per row in arbitrary format + that conforms to the defined + [Schema][google.cloud.discoveryengine.v1.Schema] of the + data store. This can only be used by the GENERIC Data + Store vertical. + """ + + partition_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=5, + oneof="partition", + message=date_pb2.Date, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + gcs_staging_dir: str = proto.Field( + proto.STRING, + number=4, + ) + data_schema: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ImportErrorConfig(proto.Message): + r"""Configuration of destination for Import related errors. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_prefix (str): + Cloud Storage prefix for import errors. This must be an + empty, existing Cloud Storage directory. Import errors will + be written to sharded files in this directory, one per line, + as a JSON-encoded ``google.rpc.Status`` message. + + This field is a member of `oneof`_ ``destination``. + """ + + gcs_prefix: str = proto.Field( + proto.STRING, + number=1, + oneof="destination", + ) + + +class ImportUserEventsRequest(proto.Message): + r"""Request message for the ImportUserEvents request. + + This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + inline_source (google.cloud.discoveryengine_v1.types.ImportUserEventsRequest.InlineSource): + Required. The Inline source for the input + content for UserEvents. + + This field is a member of `oneof`_ ``source``. + gcs_source (google.cloud.discoveryengine_v1.types.GcsSource): + Required. Cloud Storage location for the + input content. + + This field is a member of `oneof`_ ``source``. + bigquery_source (google.cloud.discoveryengine_v1.types.BigQuerySource): + Required. BigQuery input source. + + This field is a member of `oneof`_ ``source``. + parent (str): + Required. Parent DataStore resource name, of the form + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`` + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + The desired location of errors incurred + during the Import. Cannot be set for inline user + event imports. + """ + + class InlineSource(proto.Message): + r"""The inline source for the input config for ImportUserEvents + method. + + Attributes: + user_events (MutableSequence[google.cloud.discoveryengine_v1.types.UserEvent]): + Required. A list of user events to import. + Recommended max of 10k items. + """ + + user_events: MutableSequence[user_event.UserEvent] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=user_event.UserEvent, + ) + + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=InlineSource, + ) + gcs_source: "GcsSource" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message="GcsSource", + ) + bigquery_source: "BigQuerySource" = proto.Field( + proto.MESSAGE, + number=4, + oneof="source", + message="BigQuerySource", + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + error_config: "ImportErrorConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="ImportErrorConfig", + ) + + +class ImportUserEventsResponse(proto.Message): + r"""Response of the ImportUserEventsRequest. If the long running + operation was successful, then this message is returned by the + google.longrunning.Operations.response field if the operation + was successful. + + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + Echoes the destination for the complete + errors if this field was set in the request. + joined_events_count (int): + Count of user events imported with complete + existing Documents. + unjoined_events_count (int): + Count of user events imported, but with + Document information not found in the existing + Branch. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + error_config: "ImportErrorConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ImportErrorConfig", + ) + joined_events_count: int = proto.Field( + proto.INT64, + number=3, + ) + unjoined_events_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class ImportUserEventsMetadata(proto.Message): + r"""Metadata related to the progress of the Import operation. 
+ This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were processed + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class ImportDocumentsMetadata(proto.Message): + r"""Metadata related to the progress of the ImportDocuments + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were processed + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class ImportDocumentsRequest(proto.Message): + r"""Request message for Import methods. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + inline_source (google.cloud.discoveryengine_v1.types.ImportDocumentsRequest.InlineSource): + The Inline source for the input content for + documents. + + This field is a member of `oneof`_ ``source``. + gcs_source (google.cloud.discoveryengine_v1.types.GcsSource): + Cloud Storage location for the input content. + + This field is a member of `oneof`_ ``source``. + bigquery_source (google.cloud.discoveryengine_v1.types.BigQuerySource): + BigQuery input source. + + This field is a member of `oneof`_ ``source``. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + Requires create/update permission. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + The desired location of errors incurred + during the Import. + reconciliation_mode (google.cloud.discoveryengine_v1.types.ImportDocumentsRequest.ReconciliationMode): + The mode of reconciliation between existing documents and + the documents to be imported. Defaults to + [ReconciliationMode.INCREMENTAL][google.cloud.discoveryengine.v1.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL]. 
+ auto_generate_ids (bool): + Whether to automatically generate IDs for the documents if + absent. + + If set to ``true``, + [Document.id][google.cloud.discoveryengine.v1.Document.id]s + are automatically generated based on the hash of the + payload, where IDs may not be consistent during multiple + imports. In that case, + [ReconciliationMode.FULL][google.cloud.discoveryengine.v1.ImportDocumentsRequest.ReconciliationMode.FULL] + is highly recommended to avoid duplicate contents. If unset + or set to ``false``, + [Document.id][google.cloud.discoveryengine.v1.Document.id]s + have to be specified using + [id_field][google.cloud.discoveryengine.v1.ImportDocumentsRequest.id_field], + otherwise, documents without IDs will fail to be imported. + + Only set this field when using + [GcsSource][google.cloud.discoveryengine.v1.GcsSource] or + [BigQuerySource][google.cloud.discoveryengine.v1.BigQuerySource], + and when + [GcsSource.data_schema][google.cloud.discoveryengine.v1.GcsSource.data_schema] + or + [BigQuerySource.data_schema][google.cloud.discoveryengine.v1.BigQuerySource.data_schema] + is ``custom``. Otherwise, an INVALID_ARGUMENT error is + thrown. + id_field (str): + The field in the Cloud Storage and BigQuery sources that + indicates the unique IDs of the documents. + + For [GcsSource][google.cloud.discoveryengine.v1.GcsSource] + it is the key of the JSON field. For instance, ``my_id`` for + JSON ``{"my_id": "some_uuid"}``. For + [BigQuerySource][google.cloud.discoveryengine.v1.BigQuerySource] + it is the column name of the BigQuery table where the unique + IDs are stored. + + The values of the JSON field or the BigQuery column will be + used as the + [Document.id][google.cloud.discoveryengine.v1.Document.id]s. + The JSON field or the BigQuery column must be of string + type, and the values must be set as valid strings conforming to + `RFC-1034 `__ with 1-63 + characters. Otherwise, documents without valid IDs will fail + to be imported. + + Only set this field when using + [GcsSource][google.cloud.discoveryengine.v1.GcsSource] or + [BigQuerySource][google.cloud.discoveryengine.v1.BigQuerySource], + and when + [GcsSource.data_schema][google.cloud.discoveryengine.v1.GcsSource.data_schema] + or + [BigQuerySource.data_schema][google.cloud.discoveryengine.v1.BigQuerySource.data_schema] + is ``custom``. And only set this field when + [auto_generate_ids][google.cloud.discoveryengine.v1.ImportDocumentsRequest.auto_generate_ids] + is unset or set to ``false``. Otherwise, an INVALID_ARGUMENT + error is thrown. + + If it is unset, a default value ``_id`` is used when + importing from the allowed data sources. + """ + + class ReconciliationMode(proto.Enum): + r"""Indicates how imported documents are reconciled with the + existing documents created or imported before. + + Values: + RECONCILIATION_MODE_UNSPECIFIED (0): + Defaults to INCREMENTAL. + INCREMENTAL (1): + Inserts new documents or updates existing + documents. + FULL (2): + Calculates diff and replaces the entire + document dataset. Existing documents may be + deleted if they are not present in the source + location. + """ + RECONCILIATION_MODE_UNSPECIFIED = 0 + INCREMENTAL = 1 + FULL = 2 + + class InlineSource(proto.Message): + r"""The inline source for the input config for ImportDocuments + method. + + Attributes: + documents (MutableSequence[google.cloud.discoveryengine_v1.types.Document]): + Required. A list of documents to update/create. Each + document must have a valid + [Document.id][google.cloud.discoveryengine.v1.Document.id].
+ Recommended max of 100 items. + """ + + documents: MutableSequence[document.Document] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=document.Document, + ) + + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=InlineSource, + ) + gcs_source: "GcsSource" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message="GcsSource", + ) + bigquery_source: "BigQuerySource" = proto.Field( + proto.MESSAGE, + number=4, + oneof="source", + message="BigQuerySource", + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + error_config: "ImportErrorConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="ImportErrorConfig", + ) + reconciliation_mode: ReconciliationMode = proto.Field( + proto.ENUM, + number=6, + enum=ReconciliationMode, + ) + auto_generate_ids: bool = proto.Field( + proto.BOOL, + number=8, + ) + id_field: str = proto.Field( + proto.STRING, + number=9, + ) + + +class ImportDocumentsResponse(proto.Message): + r"""Response of the + [ImportDocumentsRequest][google.cloud.discoveryengine.v1.ImportDocumentsRequest]. + If the long running operation is done, then this message is returned + by the google.longrunning.Operations.response field if the operation + was successful. + + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + Echoes the destination for the complete + errors in the request if set. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + error_config: "ImportErrorConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ImportErrorConfig", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py new file mode 100644 index 000000000000..319daddfb956 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "PurgeDocumentsRequest", + "PurgeDocumentsResponse", + "PurgeDocumentsMetadata", + }, +) + + +class PurgeDocumentsRequest(proto.Message): + r"""Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. + + Attributes: + parent (str): + Required. 
The parent resource name, such as
+            ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``.
+        filter (str):
+            Required. Filter matching documents to purge. The only
+            currently supported value is ``*`` (all items).
+        force (bool):
+            Actually performs the purge. If ``force`` is set to false,
+            the expected purge count is returned without deleting any
+            documents.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    force: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+
+
+class PurgeDocumentsResponse(proto.Message):
+    r"""Response message for
+    [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments]
+    method. If the long running operation is successfully done, then
+    this message is returned by the
+    google.longrunning.Operations.response field.
+
+    Attributes:
+        purge_count (int):
+            The total count of documents purged as a
+            result of the operation.
+        purge_sample (MutableSequence[str]):
+            A sample of document names that will be deleted. Only
+            populated if ``force`` is set to false. At most 100 names
+            will be returned, and the names are chosen at random.
+    """
+
+    purge_count: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    purge_sample: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+class PurgeDocumentsMetadata(proto.Message):
+    r"""Metadata related to the progress of the PurgeDocuments
+    operation. This will be returned by the
+    google.longrunning.Operation.metadata field.
+
+    Attributes:
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Operation create time.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Operation last update time. If the operation
+            is done, this is also the finish time.
+        success_count (int):
+            Count of entries that were deleted
+            successfully.
+        failure_count (int):
+            Count of entries that encountered errors
+            while processing.
+    """
+
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=timestamp_pb2.Timestamp,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    success_count: int = proto.Field(
+        proto.INT64,
+        number=3,
+    )
+    failure_count: int = proto.Field(
+        proto.INT64,
+        number=4,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema.py
new file mode 100644
index 000000000000..c7d8f2915f13
--- /dev/null
+++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "Schema", + }, +) + + +class Schema(proto.Message): + r"""Defines the structure and layout of a type of document data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + struct_schema (google.protobuf.struct_pb2.Struct): + The structured representation of the schema. + + This field is a member of `oneof`_ ``schema``. + json_schema (str): + The JSON representation of the schema. + + This field is a member of `oneof`_ ``schema``. + name (str): + Immutable. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This field must be a UTF-8 encoded string with a length + limit of 1024 characters. + """ + + struct_schema: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + oneof="schema", + message=struct_pb2.Struct, + ) + json_schema: str = proto.Field( + proto.STRING, + number=3, + oneof="schema", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema_service.py new file mode 100644 index 000000000000..62369e1aea51 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/schema_service.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import schema as gcd_schema + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "CreateSchemaRequest", + "UpdateSchemaRequest", + "DeleteSchemaRequest", + "CreateSchemaMetadata", + "UpdateSchemaMetadata", + "DeleteSchemaMetadata", + }, +) + + +class GetSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1.SchemaService.GetSchema] + method. + + Attributes: + name (str): + Required. 
The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSchemasRequest(proto.Message): + r"""Request message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + method. + + Attributes: + parent (str): + Required. The parent data store resource name, in the format + of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + page_size (int): + The maximum number of + [Schema][google.cloud.discoveryengine.v1.Schema]s to return. + The service may return fewer than this value. + + If unspecified, at most 100 + [Schema][google.cloud.discoveryengine.v1.Schema]s will be + returned. + + The maximum value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + A page token, received from a previous + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + must match the call that provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSchemasResponse(proto.Message): + r"""Response message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1.SchemaService.ListSchemas] + method. + + Attributes: + schemas (MutableSequence[google.cloud.discoveryengine_v1.types.Schema]): + The [Schema][google.cloud.discoveryengine.v1.Schema]s. + next_page_token (str): + A token that can be sent as + [ListSchemasRequest.page_token][google.cloud.discoveryengine.v1.ListSchemasRequest.page_token] + to retrieve the next page. If this field is omitted, there + are no subsequent pages. + """ + + @property + def raw_page(self): + return self + + schemas: MutableSequence[gcd_schema.Schema] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_schema.Schema, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.CreateSchema][google.cloud.discoveryengine.v1.SchemaService.CreateSchema] + method. + + Attributes: + parent (str): + Required. The parent data store resource name, in the format + of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + schema (google.cloud.discoveryengine_v1.types.Schema): + Required. The + [Schema][google.cloud.discoveryengine.v1.Schema] to create. + schema_id (str): + Required. The ID to use for the + [Schema][google.cloud.discoveryengine.v1.Schema], which will + become the final component of the + [Schema.name][google.cloud.discoveryengine.v1.Schema.name]. + + This field should conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. 
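+
+    A minimal request sketch (the parent path, schema body, and schema
+    ID below are placeholder values, assuming the package-level exports
+    of ``discoveryengine_v1``)::
+
+        from google.cloud import discoveryengine_v1
+
+        request = discoveryengine_v1.CreateSchemaRequest(
+            parent="projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store",
+            schema=discoveryengine_v1.Schema(json_schema='{"type": "object"}'),
+            schema_id="my-schema",
+        )
+        # SchemaService.CreateSchema is a long-running operation;
+        # SchemaServiceClient().create_schema(request=request).result()
+        # blocks until the schema is created.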
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + schema: gcd_schema.Schema = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_schema.Schema, + ) + schema_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1.SchemaService.UpdateSchema] + method. + + Attributes: + schema (google.cloud.discoveryengine_v1.types.Schema): + Required. The + [Schema][google.cloud.discoveryengine.v1.Schema] to update. + allow_missing (bool): + If set to true, and the + [Schema][google.cloud.discoveryengine.v1.Schema] is not + found, a new + [Schema][google.cloud.discoveryengine.v1.Schema] will be + created. In this situation, ``update_mask`` is ignored. + """ + + schema: gcd_schema.Schema = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_schema.Schema, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1.SchemaService.DeleteSchema] + method. + + Attributes: + name (str): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSchemaMetadata(proto.Message): + r"""Metadata for Create Schema LRO. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateSchemaMetadata(proto.Message): + r"""Metadata for UpdateSchema LRO. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteSchemaMetadata(proto.Message): + r"""Metadata for DeleteSchema LRO. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
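+
+    A sketch of reading this metadata from an in-flight delete
+    (``client`` is an assumed SchemaServiceClient and ``name`` an
+    existing schema resource name; both are placeholders)::
+
+        operation = client.delete_schema(name=name)
+        # operation.metadata deserializes to DeleteSchemaMetadata.
+        print(operation.metadata.create_time)
+        operation.result()  # block until the schema is deleted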
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py new file mode 100644 index 000000000000..9807af5e7e60 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py @@ -0,0 +1,476 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import document as gcd_document + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "SearchRequest", + "SearchResponse", + }, +) + + +class SearchRequest(proto.Message): + r"""Request message for + [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search] + method. + + Attributes: + serving_config (str): + Required. The resource name of the Search serving config, + such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store/servingConfigs/default_serving_config``. + This field is used to identify the serving configuration + name, set of models used to make the search. + branch (str): + The branch resource name, such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store/branches/0``. + + Use ``default_branch`` as the branch ID or leave this field + empty, to search documents under the default branch. + query (str): + Raw search query. + page_size (int): + Maximum number of + [Document][google.cloud.discoveryengine.v1.Document]s to + return. If unspecified, defaults to a reasonable value. The + maximum allowed value is 100. Values above 100 will be + coerced to 100. + + If this field is negative, an ``INVALID_ARGUMENT`` is + returned. + page_token (str): + A page token received from a previous + [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search] + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search] + must match the call that provided the page token. Otherwise, + an ``INVALID_ARGUMENT`` error is returned. + offset (int): + A 0-indexed integer that specifies the current offset (that + is, starting result location, amongst the + [Document][google.cloud.discoveryengine.v1.Document]s deemed + by the API as relevant) in search results. 
This field is + only considered if + [page_token][google.cloud.discoveryengine.v1.SearchRequest.page_token] + is unset. + + If this field is negative, an ``INVALID_ARGUMENT`` is + returned. + user_info (google.cloud.discoveryengine_v1.types.UserInfo): + Information about the end user. Highly recommended for + analytics. The user_agent string in UserInfo will be used to + deduce device_type for analytics. + params (MutableMapping[str, google.protobuf.struct_pb2.Value]): + Additional search parameters. + + For public website search only, supported values are: + + - ``user_country_code``: string. Default empty. If set to + non-empty, results are restricted or boosted based on the + location provided. + - ``search_type``: double. Default empty. Enables + non-webpage searching depending on the value. The only + valid non-default value is 1, which enables image + searching. + query_expansion_spec (google.cloud.discoveryengine_v1.types.SearchRequest.QueryExpansionSpec): + The query expansion specification that + specifies the conditions under which query + expansion will occur. + spell_correction_spec (google.cloud.discoveryengine_v1.types.SearchRequest.SpellCorrectionSpec): + The spell correction specification that + specifies the mode under which spell correction + will take effect. + user_pseudo_id (str): + A unique identifier for tracking visitors. For example, this + could be implemented with an HTTP cookie, which should be + able to uniquely identify a visitor on a single device. This + unique identifier should not change if the visitor logs in + or out of the website. + + This field should NOT have a fixed value such as + ``unknown_visitor``. + + This should be the same identifier as + [UserEvent.user_pseudo_id][google.cloud.discoveryengine.v1.UserEvent.user_pseudo_id] + and + [CompleteQueryRequest.user_pseudo_id][google.cloud.discoveryengine.v1.CompleteQueryRequest.user_pseudo_id] + + The field must be a UTF-8 encoded string with a length limit + of 128 characters. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. + content_search_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec): + The content search spec that configs the + desired behavior of content search. + safe_search (bool): + Whether to turn on safe search. This is only supported for + [ContentConfig.PUBLIC_WEBSITE][]. + user_labels (MutableMapping[str, str]): + The user labels applied to a resource must meet the + following requirements: + + - Each resource can have multiple labels, up to a maximum + of 64. + - Each label must be a key-value pair. + - Keys have a minimum length of 1 character and a maximum + length of 63 characters and cannot be empty. Values can + be empty and have a maximum length of 63 characters. + - Keys and values can contain only lowercase letters, + numeric characters, underscores, and dashes. All + characters must use UTF-8 encoding, and international + characters are allowed. + - The key portion of a label must be unique. However, you + can use the same key with multiple resources. + - Keys must start with a lowercase letter or international + character. + + See `Google Cloud + Document `__ + for more details. + """ + + class QueryExpansionSpec(proto.Message): + r"""Specification to determine under which conditions query + expansion should occur. + + Attributes: + condition (google.cloud.discoveryengine_v1.types.SearchRequest.QueryExpansionSpec.Condition): + The condition under which query expansion should occur. 
+            Defaults to
+            [Condition.DISABLED][google.cloud.discoveryengine.v1.SearchRequest.QueryExpansionSpec.Condition.DISABLED].
+        """
+
+        class Condition(proto.Enum):
+            r"""Enum describing under which condition query expansion should
+            occur.
+
+            Values:
+                CONDITION_UNSPECIFIED (0):
+                    Unspecified query expansion condition. In this case, server
+                    behavior defaults to
+                    [Condition.DISABLED][google.cloud.discoveryengine.v1.SearchRequest.QueryExpansionSpec.Condition.DISABLED].
+                DISABLED (1):
+                    Disabled query expansion. Only the exact search query is
+                    used, even if
+                    [SearchResponse.total_size][google.cloud.discoveryengine.v1.SearchResponse.total_size]
+                    is zero.
+                AUTO (2):
+                    Automatic query expansion built by the Search
+                    API.
+            """
+            CONDITION_UNSPECIFIED = 0
+            DISABLED = 1
+            AUTO = 2
+
+        condition: "SearchRequest.QueryExpansionSpec.Condition" = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum="SearchRequest.QueryExpansionSpec.Condition",
+        )
+
+    class SpellCorrectionSpec(proto.Message):
+        r"""The specification for query spell correction.
+
+        Attributes:
+            mode (google.cloud.discoveryengine_v1.types.SearchRequest.SpellCorrectionSpec.Mode):
+                The mode under which spell correction should take effect to
+                replace the original search query. Defaults to
+                [Mode.AUTO][google.cloud.discoveryengine.v1.SearchRequest.SpellCorrectionSpec.Mode.AUTO].
+        """
+
+        class Mode(proto.Enum):
+            r"""Enum describing under which mode spell correction should
+            occur.
+
+            Values:
+                MODE_UNSPECIFIED (0):
+                    Unspecified spell correction mode. In this case, server
+                    behavior defaults to
+                    [Mode.AUTO][google.cloud.discoveryengine.v1.SearchRequest.SpellCorrectionSpec.Mode.AUTO].
+                SUGGESTION_ONLY (1):
+                    Search API tries to find a spelling suggestion. If a
+                    suggestion is found, it is put in the
+                    [SearchResponse.corrected_query][google.cloud.discoveryengine.v1.SearchResponse.corrected_query].
+                    The spelling suggestion will not be used as the search
+                    query.
+                AUTO (2):
+                    Automatic spell correction built by the
+                    Search API. Search will be based on the
+                    corrected query if found.
+            """
+            MODE_UNSPECIFIED = 0
+            SUGGESTION_ONLY = 1
+            AUTO = 2
+
+        mode: "SearchRequest.SpellCorrectionSpec.Mode" = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum="SearchRequest.SpellCorrectionSpec.Mode",
+        )
+
+    class ContentSearchSpec(proto.Message):
+        r"""The specification that configures the desired behavior of
+        the UCS content search.
+
+        Attributes:
+            snippet_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SnippetSpec):
+                If no snippet spec is provided, there will
+                be no snippet in the search result.
+            summary_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SummarySpec):
+                If no summary spec is provided, there will
+                be no summary in the search response.
+        """
+
+        class SnippetSpec(proto.Message):
+            r"""The specification that configures the snippet in the search
+            results.
+
+            Attributes:
+                max_snippet_count (int):
+                    Max number of snippets returned in each search result. If
+                    the number of matching snippets is less than
+                    max_snippet_count, all of the snippets are returned;
+                    otherwise, max_snippet_count snippets are returned.
+
+                    At most 5 snippets will be returned for each SearchResult.
+                reference_only (bool):
+                    If true, only the snippet reference is
+                    returned.
+            """
+
+            max_snippet_count: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            reference_only: bool = proto.Field(
+                proto.BOOL,
+                number=2,
+            )
+
+        class SummarySpec(proto.Message):
+            r"""The specification that configures the summary in the search
+            response.
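+
+            A sketch of requesting a summary built from the top three
+            results (the count of three is illustrative, not a
+            default)::
+
+                summary_spec = SearchRequest.ContentSearchSpec.SummarySpec(
+                    summary_result_count=3,
+                )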
+ + Attributes: + summary_result_count (int): + The number of top results the summary should be generated + from. If the number of returned results is less than + summary_result_count, then the summary would be derived from + all the results; otherwise, the summary would be derived + from the top results. + + At most 5 results can be used for generating summary. + """ + + summary_result_count: int = proto.Field( + proto.INT32, + number=1, + ) + + snippet_spec: "SearchRequest.ContentSearchSpec.SnippetSpec" = proto.Field( + proto.MESSAGE, + number=1, + message="SearchRequest.ContentSearchSpec.SnippetSpec", + ) + summary_spec: "SearchRequest.ContentSearchSpec.SummarySpec" = proto.Field( + proto.MESSAGE, + number=2, + message="SearchRequest.ContentSearchSpec.SummarySpec", + ) + + serving_config: str = proto.Field( + proto.STRING, + number=1, + ) + branch: str = proto.Field( + proto.STRING, + number=2, + ) + query: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + offset: int = proto.Field( + proto.INT32, + number=6, + ) + user_info: common.UserInfo = proto.Field( + proto.MESSAGE, + number=21, + message=common.UserInfo, + ) + params: MutableMapping[str, struct_pb2.Value] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=11, + message=struct_pb2.Value, + ) + query_expansion_spec: QueryExpansionSpec = proto.Field( + proto.MESSAGE, + number=13, + message=QueryExpansionSpec, + ) + spell_correction_spec: SpellCorrectionSpec = proto.Field( + proto.MESSAGE, + number=14, + message=SpellCorrectionSpec, + ) + user_pseudo_id: str = proto.Field( + proto.STRING, + number=15, + ) + content_search_spec: ContentSearchSpec = proto.Field( + proto.MESSAGE, + number=24, + message=ContentSearchSpec, + ) + safe_search: bool = proto.Field( + proto.BOOL, + number=20, + ) + user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=22, + ) + + +class SearchResponse(proto.Message): + r"""Response message for + [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search] + method. + + Attributes: + results (MutableSequence[google.cloud.discoveryengine_v1.types.SearchResponse.SearchResult]): + A list of matched documents. The order + represents the ranking. + total_size (int): + The estimated total count of matched items irrespective of + pagination. The count of + [results][google.cloud.discoveryengine.v1.SearchResponse.results] + returned by pagination may be less than the + [total_size][google.cloud.discoveryengine.v1.SearchResponse.total_size] + that matches. + attribution_token (str): + A unique search token. This should be included in the + [UserEvent][google.cloud.discoveryengine.v1.UserEvent] logs + resulting from this search, which enables accurate + attribution of search model performance. + next_page_token (str): + A token that can be sent as + [SearchRequest.page_token][google.cloud.discoveryengine.v1.SearchRequest.page_token] + to retrieve the next page. If this field is omitted, there + are no subsequent pages. + corrected_query (str): + Contains the spell corrected query, if found. If the spell + correction type is AUTOMATIC, then the search results are + based on corrected_query. Otherwise the original query is + used for search. + summary (google.cloud.discoveryengine_v1.types.SearchResponse.Summary): + A summary as part of the search results. 
This field is only + returned if + [SearchRequest.ContentSearchSpec.summary_spec][google.cloud.discoveryengine.v1.SearchRequest.ContentSearchSpec.summary_spec] + is set. + """ + + class SearchResult(proto.Message): + r"""Represents the search results. + + Attributes: + id (str): + [Document.id][google.cloud.discoveryengine.v1.Document.id] + of the searched + [Document][google.cloud.discoveryengine.v1.Document]. + document (google.cloud.discoveryengine_v1.types.Document): + The document data snippet in the search + response. Only fields that are marked as + retrievable are populated. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + document: gcd_document.Document = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_document.Document, + ) + + class Summary(proto.Message): + r"""Summary of the top N search result specified by the summary + spec. + + Attributes: + summary_text (str): + The summary content. + """ + + summary_text: str = proto.Field( + proto.STRING, + number=1, + ) + + @property + def raw_page(self): + return self + + results: MutableSequence[SearchResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=SearchResult, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + attribution_token: str = proto.Field( + proto.STRING, + number=4, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=5, + ) + corrected_query: str = proto.Field( + proto.STRING, + number=7, + ) + summary: Summary = proto.Field( + proto.MESSAGE, + number=9, + message=Summary, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py new file mode 100644 index 000000000000..4a51f8d86b55 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py @@ -0,0 +1,734 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "UserEvent", + "PageInfo", + "SearchInfo", + "CompletionInfo", + "TransactionInfo", + "DocumentInfo", + "PanelInfo", + "MediaInfo", + }, +) + + +class UserEvent(proto.Message): + r"""UserEvent captures all metadata information Discovery Engine + API needs to know about how end users interact with customers' + website. + + Attributes: + event_type (str): + Required. User event type. Allowed values are: + + Generic values: + + - ``search``: Search for Documents. + - ``view-item``: Detailed page view of a Document. 
+            -  ``view-item-list``: View of a panel or ordered list of
+               Documents.
+            -  ``view-home-page``: View of the home page.
+            -  ``view-category-page``: View of a category page, e.g.
+               Home > Men > Jeans
+
+            Retail-related values:
+
+            -  ``add-to-cart``: Add one or more items to the cart, e.g.
+               in Retail online shopping
+            -  ``purchase``: Purchase one or more items
+
+            Media-related values:
+
+            -  ``media-play``: Start/resume watching a video, playing a
+               song, etc.
+            -  ``media-complete``: Finished or stopped midway through a
+               video, song, etc.
+        user_pseudo_id (str):
+            Required. A unique identifier for tracking visitors.
+
+            For example, this could be implemented with an HTTP cookie,
+            which should be able to uniquely identify a visitor on a
+            single device. This unique identifier should not change if
+            the visitor logs in to or out of the website.
+
+            Do not set the field to the same fixed ID for different
+            users. This mixes the event history of those users together,
+            which results in degraded model quality.
+
+            The field must be a UTF-8 encoded string with a length limit
+            of 128 characters. Otherwise, an INVALID_ARGUMENT error is
+            returned.
+
+            The field should not contain PII or user data. We recommend
+            using the Google Analytics `Client ID `__ for this field.
+        event_time (google.protobuf.timestamp_pb2.Timestamp):
+            Only required for the
+            [UserEventService.ImportUserEvents][google.cloud.discoveryengine.v1.UserEventService.ImportUserEvents]
+            method. Timestamp of when the user event happened.
+        user_info (google.cloud.discoveryengine_v1.types.UserInfo):
+            Information about the end user.
+        direct_user_request (bool):
+            Should be set to true if the request is made directly from
+            the end user, in which case the
+            [UserEvent.user_info.user_agent][google.cloud.discoveryengine.v1.UserInfo.user_agent]
+            can be populated from the HTTP request.
+
+            This flag should be set only if the API request is made
+            directly from the end user, such as from a mobile app (and
+            not if a gateway or a server is processing and pushing the
+            user events).
+
+            This should not be set when using the JavaScript tag in
+            [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent].
+        session_id (str):
+            A unique identifier for tracking a visitor session with a
+            length limit of 128 bytes. A session is an aggregation of
+            end user behavior in a time span.
+
+            General guidelines for populating the session_id:
+
+            1. If the user has no activity for 30 minutes, a new
+               session_id should be assigned.
+            2. The session_id should be unique across users; we suggest
+               using a UUID or adding
+               [UserEvent.user_pseudo_id][google.cloud.discoveryengine.v1.UserEvent.user_pseudo_id]
+               as a prefix.
+        page_info (google.cloud.discoveryengine_v1.types.PageInfo):
+            Page metadata such as categories and other critical
+            information for certain event types such as
+            ``view-category-page``.
+        attribution_token (str):
+            Token to attribute an API response to the user action(s)
+            that triggered the event.
+
+            Highly recommended for user events that are the result of
+            [RecommendationService.Recommend][]. This field enables
+            accurate attribution of recommendation model performance.
+
+            The value must be one of:
+
+            -  [PredictResponse.attribution_token][] for events that are
+               the result of [RecommendationService.Recommend][].
+            -  [SearchResponse.attribution_token][google.cloud.discoveryengine.v1.SearchResponse.attribution_token]
+               for events that are the result of
+               [SearchService.Search][google.cloud.discoveryengine.v1.SearchService.Search].
+            -  [CompleteQueryResponse.attribution_token][] for events
+               that are the result of
+               [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1.CompletionService.CompleteQuery].
+
+            This token enables us to accurately attribute page view or
+            conversion completion back to the event and the particular
+            predict response containing this clicked/purchased product.
+            If a user clicks on product K in the recommendation results,
+            pass [PredictResponse.attribution_token][] as a URL
+            parameter to product K's page. When recording events on
+            product K's page, log the
+            [PredictResponse.attribution_token][] to this field.
+        filter (str):
+            The filter syntax consists of an expression language for
+            constructing a predicate from one or more fields of the
+            documents being filtered.
+
+            For example, for ``search`` events, the associated
+            [SearchRequest][google.cloud.discoveryengine.v1.SearchRequest]
+            may contain a filter expression in [SearchRequest.filter][]
+            conforming to https://google.aip.dev/160#filtering.
+
+            Similarly, for ``view-item-list`` events that are generated
+            from a [RecommendationService.RecommendRequest][], this
+            field may be populated directly from
+            [RecommendationService.RecommendRequest.filter][] conforming
+            to https://google.aip.dev/160#filtering.
+
+            The value must be a UTF-8 encoded string with a length limit
+            of 1,000 characters. Otherwise, an INVALID_ARGUMENT error is
+            returned.
+        documents (MutableSequence[google.cloud.discoveryengine_v1.types.DocumentInfo]):
+            List of Documents associated with this user event.
+
+            This field is optional except for the following event types:
+
+            -  ``view-item``
+            -  ``add-to-cart``
+            -  ``purchase``
+            -  ``media-play``
+            -  ``media-complete``
+
+            In a ``search`` event, this field represents the documents
+            returned to the end user on the current page (the end user
+            may not have finished browsing the whole page yet). When a
+            new page is returned to the end user, after
+            pagination/filtering/ordering, even for the same query, a
+            new ``search`` event with different
+            [UserEvent.documents][google.cloud.discoveryengine.v1.UserEvent.documents]
+            is desired.
+        panel (google.cloud.discoveryengine_v1.types.PanelInfo):
+            Panel metadata associated with this user
+            event.
+        search_info (google.cloud.discoveryengine_v1.types.SearchInfo):
+            Search API details related to the event.
+
+            This field should be set for ``search`` events.
+        completion_info (google.cloud.discoveryengine_v1.types.CompletionInfo):
+            CompleteQuery API details related to the event.
+
+            This field should be set for ``search`` events when the
+            autocomplete function is enabled and the user clicks a
+            search suggestion.
+        transaction_info (google.cloud.discoveryengine_v1.types.TransactionInfo):
+            The transaction metadata (if any) associated
+            with this user event.
+        tag_ids (MutableSequence[str]):
+            A list of identifiers for the independent
+            experiment groups this user event belongs to.
+            This is used to distinguish between user events
+            associated with different experiment setups on
+            the customer end.
+        promotion_ids (MutableSequence[str]):
+            The promotion IDs if this is an event
+            associated with promotions. Currently, this
+            field is restricted to at most one ID.
+        attributes (MutableMapping[str, google.cloud.discoveryengine_v1.types.CustomAttribute]):
+            Extra user event features to include in the recommendation
+            model. These attributes must NOT contain data that needs to
+            be parsed or processed further, e.g. JSON or other
+            encodings.
+ + If you provide custom attributes for ingested user events, + also include them in the user events that you associate with + prediction requests. Custom attribute formatting must be + consistent between imported events and events provided with + prediction requests. This lets the Discovery Engine API use + those custom attributes when training models and serving + predictions, which helps improve recommendation quality. + + This field needs to pass all below criteria, otherwise an + ``INVALID_ARGUMENT`` error is returned: + + - The key must be a UTF-8 encoded string with a length + limit of 5,000 characters. + - For text attributes, at most 400 values are allowed. + Empty values are not allowed. Each value must be a UTF-8 + encoded string with a length limit of 256 characters. + - For number attributes, at most 400 values are allowed. + + For product recommendations, an example of extra user + information is ``traffic_channel``, which is how a user + arrives at the site. Users can arrive at the site by coming + to the site directly, coming through Google search, or in + other ways. + media_info (google.cloud.discoveryengine_v1.types.MediaInfo): + Media-specific info. + """ + + event_type: str = proto.Field( + proto.STRING, + number=1, + ) + user_pseudo_id: str = proto.Field( + proto.STRING, + number=2, + ) + event_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + user_info: common.UserInfo = proto.Field( + proto.MESSAGE, + number=4, + message=common.UserInfo, + ) + direct_user_request: bool = proto.Field( + proto.BOOL, + number=5, + ) + session_id: str = proto.Field( + proto.STRING, + number=6, + ) + page_info: "PageInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="PageInfo", + ) + attribution_token: str = proto.Field( + proto.STRING, + number=8, + ) + filter: str = proto.Field( + proto.STRING, + number=9, + ) + documents: MutableSequence["DocumentInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="DocumentInfo", + ) + panel: "PanelInfo" = proto.Field( + proto.MESSAGE, + number=11, + message="PanelInfo", + ) + search_info: "SearchInfo" = proto.Field( + proto.MESSAGE, + number=12, + message="SearchInfo", + ) + completion_info: "CompletionInfo" = proto.Field( + proto.MESSAGE, + number=13, + message="CompletionInfo", + ) + transaction_info: "TransactionInfo" = proto.Field( + proto.MESSAGE, + number=14, + message="TransactionInfo", + ) + tag_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + promotion_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + attributes: MutableMapping[str, common.CustomAttribute] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=17, + message=common.CustomAttribute, + ) + media_info: "MediaInfo" = proto.Field( + proto.MESSAGE, + number=18, + message="MediaInfo", + ) + + +class PageInfo(proto.Message): + r"""Detailed page information. + + Attributes: + pageview_id (str): + A unique ID of a web page view. + + This should be kept the same for all user events triggered + from the same pageview. For example, an item detail page + view could trigger multiple events as the user is browsing + the page. The ``pageViewId`` property should be kept the + same for all these events so that they can be grouped + together properly. + + When using the client side event reporting with JavaScript + pixel and Google Tag Manager, this value is filled in + automatically. 
+ page_category (str): + The most specific category associated with a category page. + + To represent full path of category, use '>' sign to separate + different hierarchies. If '>' is part of the category name, + please replace it with other character(s). + + Category pages include special pages such as sales or + promotions. For instance, a special sale page may have the + category hierarchy: "pageCategory" : "Sales > 2017 Black + Friday Deals". + + Required for ``view-category-page`` events. Other event + types should not set this field. Otherwise, an + INVALID_ARGUMENT error is returned. + uri (str): + Complete URL (window.location.href) of the + user's current page. + When using the client side event reporting with + JavaScript pixel and Google Tag Manager, this + value is filled in automatically. Maximum length + 5,000 characters. + referrer_uri (str): + The referrer URL of the current page. + When using the client side event reporting with + JavaScript pixel and Google Tag Manager, this + value is filled in automatically. However, some + browser privacy restrictions may cause this + field to be empty. + """ + + pageview_id: str = proto.Field( + proto.STRING, + number=1, + ) + page_category: str = proto.Field( + proto.STRING, + number=2, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + ) + referrer_uri: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SearchInfo(proto.Message): + r"""Detailed search information. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_query (str): + The user's search query. + + See + [SearchRequest.query][google.cloud.discoveryengine.v1.SearchRequest.query] + for definition. + + The value must be a UTF-8 encoded string with a length limit + of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is + returned. + + At least one of + [search_query][google.cloud.discoveryengine.v1.SearchInfo.search_query] + or + [PageInfo.page_category][google.cloud.discoveryengine.v1.PageInfo.page_category] + is required for ``search`` events. Other event types should + not set this field. Otherwise, an INVALID_ARGUMENT error is + returned. + order_by (str): + The order in which products are returned, if applicable. + + See [SearchRequest.order_by][] for definition and syntax. + + The value must be a UTF-8 encoded string with a length limit + of 1,000 characters. Otherwise, an INVALID_ARGUMENT error is + returned. + + This can only be set for ``search`` events. Other event + types should not set this field. Otherwise, an + INVALID_ARGUMENT error is returned. + offset (int): + An integer that specifies the current offset for pagination + (the 0-indexed starting location, amongst the products + deemed by the API as relevant). + + See + [SearchRequest.offset][google.cloud.discoveryengine.v1.SearchRequest.offset] + for definition. + + If this field is negative, an INVALID_ARGUMENT is returned. + + This can only be set for ``search`` events. Other event + types should not set this field. Otherwise, an + INVALID_ARGUMENT error is returned. + + This field is a member of `oneof`_ ``_offset``. + """ + + search_query: str = proto.Field( + proto.STRING, + number=1, + ) + order_by: str = proto.Field( + proto.STRING, + number=2, + ) + offset: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + + +class CompletionInfo(proto.Message): + r"""Detailed completion information including completion + attribution token and clicked completion info. 
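+
+    For example, if the user selected the second suggestion shown for
+    the input ``"app"``, a sketch of the resulting message (the values
+    are illustrative) is::
+
+        completion_info = CompletionInfo(
+            selected_suggestion="apple",
+            selected_position=1,  # 0-based position
+        )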
+ + Attributes: + selected_suggestion (str): + End user selected + [CompleteQueryResponse.CompletionResult.suggestion][]. + selected_position (int): + End user selected + [CompleteQueryResponse.CompletionResult.suggestion][] + position, starting from 0. + """ + + selected_suggestion: str = proto.Field( + proto.STRING, + number=1, + ) + selected_position: int = proto.Field( + proto.INT32, + number=2, + ) + + +class TransactionInfo(proto.Message): + r"""A transaction represents the entire purchase transaction. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (float): + Required. Total non-zero value associated + with the transaction. This value may include + shipping, tax, or other adjustments to the total + value that you want to include. + + This field is a member of `oneof`_ ``_value``. + currency (str): + Required. Currency code. Use three-character + ISO-4217 code. + transaction_id (str): + The transaction ID with a length limit of 128 + characters. + tax (float): + All the taxes associated with the + transaction. + + This field is a member of `oneof`_ ``_tax``. + cost (float): + All the costs associated with the products. These can be + manufacturing costs, shipping expenses not borne by the end + user, or any other costs, such that: + + - Profit = + [value][google.cloud.discoveryengine.v1.TransactionInfo.value] + - + [tax][google.cloud.discoveryengine.v1.TransactionInfo.tax] + - + [cost][google.cloud.discoveryengine.v1.TransactionInfo.cost] + + This field is a member of `oneof`_ ``_cost``. + discount_value (float): + The total discount(s) value applied to this transaction. + This figure should be excluded from + [TransactionInfo.value][google.cloud.discoveryengine.v1.TransactionInfo.value] + + For example, if a user paid + [TransactionInfo.value][google.cloud.discoveryengine.v1.TransactionInfo.value] + amount, then nominal (pre-discount) value of the transaction + is the sum of + [TransactionInfo.value][google.cloud.discoveryengine.v1.TransactionInfo.value] + and + [TransactionInfo.discount_value][google.cloud.discoveryengine.v1.TransactionInfo.discount_value] + + This means that profit is calculated the same way, + regardless of the discount value, and that + [TransactionInfo.discount_value][google.cloud.discoveryengine.v1.TransactionInfo.discount_value] + can be larger than + [TransactionInfo.value][google.cloud.discoveryengine.v1.TransactionInfo.value]: + + - Profit = + [value][google.cloud.discoveryengine.v1.TransactionInfo.value] + - + [tax][google.cloud.discoveryengine.v1.TransactionInfo.tax] + - + [cost][google.cloud.discoveryengine.v1.TransactionInfo.cost] + + This field is a member of `oneof`_ ``_discount_value``. + """ + + value: float = proto.Field( + proto.FLOAT, + number=1, + optional=True, + ) + currency: str = proto.Field( + proto.STRING, + number=2, + ) + transaction_id: str = proto.Field( + proto.STRING, + number=3, + ) + tax: float = proto.Field( + proto.FLOAT, + number=4, + optional=True, + ) + cost: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + discount_value: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + + +class DocumentInfo(proto.Message): + r"""Detailed document information associated with a user event. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Required. The Document resource ID. + + This field is a member of `oneof`_ ``document_descriptor``. + name (str): + Required. The Document resource full name, of the form: + ``projects/{project_id}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}/branches/{branch_id}/documents/{document_id}`` + + This field is a member of `oneof`_ ``document_descriptor``. + quantity (int): + Quantity of the Document associated with the user event. + Defaults to 1. + + For example, this field will be 2 if two quantities of the + same Document are involved in a ``add-to-cart`` event. + + Required for events of the following event types: + + - ``add-to-cart`` + - ``purchase`` + + This field is a member of `oneof`_ ``_quantity``. + promotion_ids (MutableSequence[str]): + The promotion IDs associated with this + Document. Currently, this field is restricted to + at most one ID. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + oneof="document_descriptor", + ) + name: str = proto.Field( + proto.STRING, + number=2, + oneof="document_descriptor", + ) + quantity: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + promotion_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class PanelInfo(proto.Message): + r"""Detailed panel information associated with a user event. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + panel_id (str): + Required. The panel ID. + display_name (str): + The display name of the panel. + panel_position (int): + The ordered position of the panel, if shown to the user with + other panels. If set, then + [total_panels][google.cloud.discoveryengine.v1.PanelInfo.total_panels] + must also be set. + + This field is a member of `oneof`_ ``_panel_position``. + total_panels (int): + The total number of panels, including this one, shown to the + user. Must be set if + [panel_position][google.cloud.discoveryengine.v1.PanelInfo.panel_position] + is set. + + This field is a member of `oneof`_ ``_total_panels``. + """ + + panel_id: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + panel_position: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + total_panels: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + + +class MediaInfo(proto.Message): + r"""Media-specific user event information. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + media_progress_duration (google.protobuf.duration_pb2.Duration): + The media progress time in seconds, if applicable. For + example, if the end user has finished 90 seconds of a + playback video, then + [MediaInfo.media_progress_duration.seconds][Duration.seconds] + should be set to 90. + media_progress_percentage (float): + Media progress should be computed using only the + media_progress_duration relative to the media total length. + + This value must be between ``[0, 1.0]`` inclusive. + + If this is not a playback or the progress cannot be computed + (e.g. ongoing livestream), this field should be unset. + + This field is a member of `oneof`_ ``_media_progress_percentage``. 
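+
+    A sketch of populating this message, following the 90-second example
+    above (the 300-second total length is a made-up value)::
+
+        from google.protobuf import duration_pb2
+
+        media_info = MediaInfo(
+            media_progress_duration=duration_pb2.Duration(seconds=90),
+            # 90 of 300 seconds watched.
+            media_progress_percentage=90 / 300,
+        )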
+ """ + + media_progress_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + media_progress_percentage: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event_service.py new file mode 100644 index 000000000000..f2cca982cb4f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event_service.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import user_event as gcd_user_event + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "WriteUserEventRequest", + "CollectUserEventRequest", + }, +) + + +class WriteUserEventRequest(proto.Message): + r"""Request message for WriteUserEvent method. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The parent DataStore resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + user_event (google.cloud.discoveryengine_v1.types.UserEvent): + Required. User event to write. + + This field is a member of `oneof`_ ``_user_event``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + user_event: gcd_user_event.UserEvent = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=gcd_user_event.UserEvent, + ) + + +class CollectUserEventRequest(proto.Message): + r"""Request message for CollectUserEvent method. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The parent DataStore resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + user_event (str): + Required. URL encoded UserEvent proto with a + length limit of 2,000,000 characters. + uri (str): + The URL including cgi-parameters but + excluding the hash fragment with a length limit + of 5,000 characters. This is often more useful + than the referer URL, because many browsers only + send the domain for 3rd party requests. + + This field is a member of `oneof`_ ``_uri``. + ets (int): + The event timestamp in milliseconds. This + prevents browser caching of otherwise identical + get requests. The name is abbreviated to reduce + the payload bytes. + + This field is a member of `oneof`_ ``_ets``. 
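+
+    A minimal request sketch (the parent path and the event payload are
+    placeholders; ``user_event`` is assumed here to carry a URL-encoded
+    JSON-serialized UserEvent)::
+
+        from urllib.parse import quote
+
+        request = CollectUserEventRequest(
+            parent="projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store",
+            user_event=quote('{"eventType": "search", "userPseudoId": "visitor-1"}'),
+            ets=1672531200000,  # milliseconds since the Unix epoch
+        )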
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + user_event: str = proto.Field( + proto.STRING, + number=2, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + ets: int = proto.Field( + proto.INT64, + number=4, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py index 0e67f2597a78..3fa0b13298dc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py @@ -13,21 +13,28 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.discoveryengine import gapic_version as package_version +from google.cloud.discoveryengine_v1beta import gapic_version as package_version __version__ = package_version.__version__ +from .services.completion_service import ( + CompletionServiceAsyncClient, + CompletionServiceClient, +) from .services.document_service import DocumentServiceAsyncClient, DocumentServiceClient from .services.recommendation_service import ( RecommendationServiceAsyncClient, RecommendationServiceClient, ) +from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient +from .services.search_service import SearchServiceAsyncClient, SearchServiceClient from .services.user_event_service import ( UserEventServiceAsyncClient, UserEventServiceClient, ) -from .types.common import CustomAttribute, UserInfo +from .types.common import CustomAttribute, Interval, UserInfo +from .types.completion_service import CompleteQueryRequest, CompleteQueryResponse from .types.document import Document from .types.document_service import ( CreateDocumentRequest, @@ -48,7 +55,25 @@ ImportUserEventsRequest, ImportUserEventsResponse, ) +from .types.purge_config import ( + PurgeDocumentsMetadata, + PurgeDocumentsRequest, + PurgeDocumentsResponse, +) from .types.recommendation_service import RecommendRequest, RecommendResponse +from .types.schema import Schema +from .types.schema_service import ( + CreateSchemaMetadata, + CreateSchemaRequest, + DeleteSchemaMetadata, + DeleteSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + UpdateSchemaMetadata, + UpdateSchemaRequest, +) +from .types.search_service import SearchRequest, SearchResponse from .types.user_event import ( CompletionInfo, DocumentInfo, @@ -62,20 +87,31 @@ from .types.user_event_service import CollectUserEventRequest, WriteUserEventRequest __all__ = ( + "CompletionServiceAsyncClient", "DocumentServiceAsyncClient", "RecommendationServiceAsyncClient", + "SchemaServiceAsyncClient", + "SearchServiceAsyncClient", "UserEventServiceAsyncClient", "BigQuerySource", "CollectUserEventRequest", + "CompleteQueryRequest", + "CompleteQueryResponse", "CompletionInfo", + "CompletionServiceClient", "CreateDocumentRequest", + "CreateSchemaMetadata", + "CreateSchemaRequest", "CustomAttribute", "DeleteDocumentRequest", + "DeleteSchemaMetadata", + "DeleteSchemaRequest", "Document", "DocumentInfo", "DocumentServiceClient", "GcsSource", "GetDocumentRequest", + "GetSchemaRequest", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", @@ -83,17 +119,30 @@ "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", + "Interval", 
"ListDocumentsRequest", "ListDocumentsResponse", + "ListSchemasRequest", + "ListSchemasResponse", "MediaInfo", "PageInfo", "PanelInfo", + "PurgeDocumentsMetadata", + "PurgeDocumentsRequest", + "PurgeDocumentsResponse", "RecommendRequest", "RecommendResponse", "RecommendationServiceClient", + "Schema", + "SchemaServiceClient", "SearchInfo", + "SearchRequest", + "SearchResponse", + "SearchServiceClient", "TransactionInfo", "UpdateDocumentRequest", + "UpdateSchemaMetadata", + "UpdateSchemaRequest", "UserEvent", "UserEventServiceClient", "UserInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json index 59f70d4c6c50..640b57adacf8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json @@ -5,6 +5,40 @@ "protoPackage": "google.cloud.discoveryengine.v1beta", "schema": "1.0", "services": { + "CompletionService": { + "clients": { + "grpc": { + "libraryClient": "CompletionServiceClient", + "rpcs": { + "CompleteQuery": { + "methods": [ + "complete_query" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CompletionServiceAsyncClient", + "rpcs": { + "CompleteQuery": { + "methods": [ + "complete_query" + ] + } + } + }, + "rest": { + "libraryClient": "CompletionServiceClient", + "rpcs": { + "CompleteQuery": { + "methods": [ + "complete_query" + ] + } + } + } + } + }, "DocumentService": { "clients": { "grpc": { @@ -35,6 +69,11 @@ "list_documents" ] }, + "PurgeDocuments": { + "methods": [ + "purge_documents" + ] + }, "UpdateDocument": { "methods": [ "update_document" @@ -70,6 +109,11 @@ "list_documents" ] }, + "PurgeDocuments": { + "methods": [ + "purge_documents" + ] + }, "UpdateDocument": { "methods": [ "update_document" @@ -105,6 +149,11 @@ "list_documents" ] }, + "PurgeDocuments": { + "methods": [ + "purge_documents" + ] + }, "UpdateDocument": { "methods": [ "update_document" @@ -148,6 +197,134 @@ } } }, + "SchemaService": { + "clients": { + "grpc": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "UpdateSchema": { + "methods": [ + "update_schema" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SchemaServiceAsyncClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "UpdateSchema": { + "methods": [ + "update_schema" + ] + } + } + }, + "rest": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "UpdateSchema": { + "methods": [ + "update_schema" + ] + } + } + } + } + }, + "SearchService": { + "clients": { + "grpc": { + "libraryClient": "SearchServiceClient", + "rpcs": { + "Search": { + "methods": [ + "search" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SearchServiceAsyncClient", + "rpcs": { + 
"Search": { + "methods": [ + "search" + ] + } + } + }, + "rest": { + "libraryClient": "SearchServiceClient", + "rpcs": { + "Search": { + "methods": [ + "search" + ] + } + } + } + } + }, "UserEventService": { "clients": { "grpc": { diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py index e94c45aea923..ba128f3c4137 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.8.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/__init__.py new file mode 100644 index 000000000000..417d6a6a941c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CompletionServiceAsyncClient +from .client import CompletionServiceClient + +__all__ = ( + "CompletionServiceClient", + "CompletionServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py new file mode 100644 index 000000000000..3bf044a19c6d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py @@ -0,0 +1,424 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1beta.types import completion_service + +from .client import CompletionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport +from .transports.grpc_asyncio import CompletionServiceGrpcAsyncIOTransport + + +class CompletionServiceAsyncClient: + """Service for Auto-Completion.""" + + _client: CompletionServiceClient + + DEFAULT_ENDPOINT = CompletionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CompletionServiceClient.DEFAULT_MTLS_ENDPOINT + + data_store_path = staticmethod(CompletionServiceClient.data_store_path) + parse_data_store_path = staticmethod(CompletionServiceClient.parse_data_store_path) + common_billing_account_path = staticmethod( + CompletionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CompletionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CompletionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + CompletionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + CompletionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CompletionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(CompletionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + CompletionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(CompletionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + CompletionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CompletionServiceAsyncClient: The constructed client. + """ + return CompletionServiceClient.from_service_account_info.__func__(CompletionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CompletionServiceAsyncClient: The constructed client. 
+ """ + return CompletionServiceClient.from_service_account_file.__func__(CompletionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CompletionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CompletionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CompletionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CompletionServiceClient).get_transport_class, type(CompletionServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CompletionServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the completion service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CompletionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = CompletionServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def complete_query(
+        self,
+        request: Optional[Union[completion_service.CompleteQueryRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> completion_service.CompleteQueryResponse:
+        r"""Completes the specified user input with keyword
+        suggestions.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1beta
+
+            async def sample_complete_query():
+                # Create a client
+                client = discoveryengine_v1beta.CompletionServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = discoveryengine_v1beta.CompleteQueryRequest(
+                    data_store="data_store_value",
+                    query="query_value",
+                )
+
+                # Make the request
+                response = await client.complete_query(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.discoveryengine_v1beta.types.CompleteQueryRequest, dict]]):
+                The request object. Request message for
+                [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery]
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.discoveryengine_v1beta.types.CompleteQueryResponse:
+                Response message for
+                [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery]
+                method.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = completion_service.CompleteQueryRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.complete_query,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("data_store", request.data_store),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
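+        # Editorial note (illustrative, not generated code): because the request
+        # is coerced into a CompleteQueryRequest above, callers may pass either a
+        # proto message or a plain dict; the values below are placeholders:
+        #
+        #     response = await client.complete_query(
+        #         request={
+        #             "data_store": "projects/p/locations/global/dataStores/d",
+        #             "query": "clou",
+        #         }
+        #     )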
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
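+        # Editorial note (illustrative, not generated code): `get_operation`
+        # expects a fully-qualified operation name; the name below is a
+        # placeholder:
+        #
+        #     op = await client.get_operation(
+        #         request={"name": "projects/p/locations/global/operations/op-123"}
+        #     )
+        #     if op.done:
+        #         print("operation finished")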
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CompletionServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py new file mode 100644 index 000000000000..d7017f598343 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py @@ -0,0 +1,661 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1beta.types import completion_service + +from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport +from .transports.grpc import CompletionServiceGrpcTransport +from .transports.grpc_asyncio import CompletionServiceGrpcAsyncIOTransport +from .transports.rest import CompletionServiceRestTransport + + +class CompletionServiceClientMeta(type): + """Metaclass for the CompletionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[CompletionServiceTransport]] + _transport_registry["grpc"] = CompletionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CompletionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CompletionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CompletionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class CompletionServiceClient(metaclass=CompletionServiceClientMeta):
+    """Service for Auto-Completion."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "discoveryengine.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CompletionServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CompletionServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> CompletionServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CompletionServiceTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CompletionServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the completion service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CompletionServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CompletionServiceTransport): + # transport is a CompletionServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def complete_query( + self, + request: Optional[Union[completion_service.CompleteQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> completion_service.CompleteQueryResponse: + r"""Completes the specified user input with keyword + suggestions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_complete_query(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.CompleteQueryRequest( + data_store="data_store_value", + query="query_value", + ) + + # Make the request + response = client.complete_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.CompleteQueryRequest, dict]): + The request object. Request message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.CompleteQueryResponse: + Response message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a completion_service.CompleteQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, completion_service.CompleteQueryRequest): + request = completion_service.CompleteQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.complete_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CompletionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CompletionServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/__init__.py new file mode 100644 index 000000000000..b0fd290e6843 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CompletionServiceTransport +from .grpc import CompletionServiceGrpcTransport +from .grpc_asyncio import CompletionServiceGrpcAsyncIOTransport +from .rest import CompletionServiceRestInterceptor, CompletionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CompletionServiceTransport]] +_transport_registry["grpc"] = CompletionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = CompletionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = CompletionServiceRestTransport + +__all__ = ( + "CompletionServiceTransport", + "CompletionServiceGrpcTransport", + "CompletionServiceGrpcAsyncIOTransport", + "CompletionServiceRestTransport", + "CompletionServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py new file mode 100644 index 000000000000..30b669415634 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
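+#
+# Editorial note (illustrative only, not part of the generated module): the
+# concrete transports registered in the sibling __init__.py can be selected by
+# label when constructing a client, for example forcing REST instead of gRPC:
+#
+#     from google.cloud import discoveryengine_v1beta
+#
+#     client = discoveryengine_v1beta.CompletionServiceClient(transport="rest")
+#
+# The credential resolution performed by the base transport below is roughly
+# equivalent to the following sketch, assuming application default credentials
+# are available in the environment:
+#
+#     import google.auth
+#
+#     credentials, _ = google.auth.default(
+#         scopes=("https://www.googleapis.com/auth/cloud-platform",),
+#     )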
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version +from google.cloud.discoveryengine_v1beta.types import completion_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CompletionServiceTransport(abc.ABC): + """Abstract transport class for CompletionService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
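+        # Editorial note: using a self-signed JWT lets service account
+        # credentials authenticate directly to the API without a round trip to
+        # the OAuth token endpoint; the hasattr check below guards against
+        # older google-auth versions that lack this method.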
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.complete_query: gapic_v1.method.wrap_method( + self.complete_query, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def complete_query( + self, + ) -> Callable[ + [completion_service.CompleteQueryRequest], + Union[ + completion_service.CompleteQueryResponse, + Awaitable[completion_service.CompleteQueryResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CompletionServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py new file mode 100644 index 000000000000..81a2c620d1ba --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 +import grpc # type: ignore + +from google.cloud.discoveryengine_v1beta.types import completion_service + +from .base import DEFAULT_CLIENT_INFO, CompletionServiceTransport + + +class CompletionServiceGrpcTransport(CompletionServiceTransport): + """gRPC backend transport for CompletionService. + + Service for Auto-Completion. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def complete_query( + self, + ) -> Callable[ + [completion_service.CompleteQueryRequest], + completion_service.CompleteQueryResponse, + ]: + r"""Return a callable for the complete query method over gRPC. + + Completes the specified user input with keyword + suggestions. + + Returns: + Callable[[~.CompleteQueryRequest], + ~.CompleteQueryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_query" not in self._stubs: + self._stubs["complete_query"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.CompletionService/CompleteQuery", + request_serializer=completion_service.CompleteQueryRequest.serialize, + response_deserializer=completion_service.CompleteQueryResponse.deserialize, + ) + return self._stubs["complete_query"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CompletionServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a133a8afcdbf --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1beta.types import completion_service + +from .base import DEFAULT_CLIENT_INFO, CompletionServiceTransport +from .grpc import CompletionServiceGrpcTransport + + +class CompletionServiceGrpcAsyncIOTransport(CompletionServiceTransport): + """gRPC AsyncIO backend transport for CompletionService. + + Service for Auto-Completion. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def complete_query(
+        self,
+    ) -> Callable[
+        [completion_service.CompleteQueryRequest],
+        Awaitable[completion_service.CompleteQueryResponse],
+    ]:
+        r"""Return a callable for the complete query method over gRPC.
+
+        Completes the specified user input with keyword
+        suggestions.
+
+        Returns:
+            Callable[[~.CompleteQueryRequest],
+                    Awaitable[~.CompleteQueryResponse]]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
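+        # Unlike the sync transport, stubs built on an aio.Channel return
+        # awaitables, matching the Awaitable[...] annotation above.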
+ if "complete_query" not in self._stubs: + self._stubs["complete_query"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.CompletionService/CompleteQuery", + request_serializer=completion_service.CompleteQueryRequest.serialize, + response_deserializer=completion_service.CompleteQueryResponse.deserialize, + ) + return self._stubs["complete_query"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("CompletionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py new file mode 100644 index 000000000000..39c5d4e7d2b0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py @@ -0,0 +1,569 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.discoveryengine_v1beta.types import completion_service + +from .base import CompletionServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CompletionServiceRestInterceptor: + """Interceptor for CompletionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CompletionServiceRestTransport. + + .. code-block:: python + class MyCustomCompletionServiceInterceptor(CompletionServiceRestInterceptor): + def pre_complete_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_complete_query(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CompletionServiceRestTransport(interceptor=MyCustomCompletionServiceInterceptor()) + client = CompletionServiceClient(transport=transport) + + + """ + + def pre_complete_query( + self, + request: completion_service.CompleteQueryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[completion_service.CompleteQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for complete_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_complete_query( + self, response: completion_service.CompleteQueryResponse + ) -> completion_service.CompleteQueryResponse: + """Post-rpc interceptor for complete_query + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CompletionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CompletionServiceRestInterceptor + + +class CompletionServiceRestTransport(CompletionServiceTransport): + """REST backend transport for CompletionService. + + Service for Auto-Completion. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CompletionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CompletionServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CompleteQuery(CompletionServiceRestStub):
+        def __hash__(self):
+            return hash("CompleteQuery")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "query": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: completion_service.CompleteQueryRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> completion_service.CompleteQueryResponse:
+            r"""Call the complete query method over HTTP.
+
+            Args:
+                request (~.completion_service.CompleteQueryRequest):
+                    The request object. Request message for
+                    [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery]
+                    method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.completion_service.CompleteQueryResponse:
+                    Response message for
+                    [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery]
+                    method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{data_store=projects/*/locations/*/dataStores/*}:completeQuery", + }, + { + "method": "get", + "uri": "/v1beta/{data_store=projects/*/locations/*/collections/*/dataStores/*}:completeQuery", + }, + ] + request, metadata = self._interceptor.pre_complete_query(request, metadata) + pb_request = completion_service.CompleteQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = completion_service.CompleteQueryResponse() + pb_resp = completion_service.CompleteQueryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_complete_query(resp) + return resp + + @property + def complete_query( + self, + ) -> Callable[ + [completion_service.CompleteQueryRequest], + completion_service.CompleteQueryResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CompleteQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(CompletionServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(CompletionServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CompletionServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py index 22e519034ab4..e66c9a467cc3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py @@ -48,7 +48,11 @@ from google.protobuf import struct_pb2 # type: ignore from google.cloud.discoveryengine_v1beta.services.document_service import pagers -from google.cloud.discoveryengine_v1beta.types import document_service, import_config +from google.cloud.discoveryengine_v1beta.types import ( + document_service, + import_config, + purge_config, +) from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -272,16 +276,16 @@ async def sample_get_document(): Required. 
Full resource name of [Document][google.cloud.discoveryengine.v1beta.Document], such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. If the caller does not have permission to access the [Document][google.cloud.discoveryengine.v1beta.Document], regardless of whether or not it exists, a - PERMISSION_DENIED error is returned. + ``PERMISSION_DENIED`` error is returned. If the requested [Document][google.cloud.discoveryengine.v1beta.Document] - does not exist, a NOT_FOUND error is returned. + does not exist, a ``NOT_FOUND`` error is returned. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -387,14 +391,14 @@ async def sample_list_documents(): method. parent (:class:`str`): Required. The parent branch resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. Use ``default_branch`` as the branch ID, to list documents under the default branch. If the caller does not have permission to list [Documents][]s under this branch, regardless of whether - or not this branch exists, a PERMISSION_DENIED error is - returned. + or not this branch exists, a ``PERMISSION_DENIED`` error + is returned. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -496,12 +500,8 @@ async def sample_create_document(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.CreateDocumentRequest( parent="parent_value", - document=document, document_id="document_id_value", ) @@ -518,7 +518,7 @@ async def sample_create_document(): method. parent (:class:`str`): Required. The parent resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -540,18 +540,18 @@ async def sample_create_document(): If the caller does not have permission to create the [Document][google.cloud.discoveryengine.v1beta.Document], regardless of whether or not it exists, a - PERMISSION_DENIED error is returned. + ``PERMISSION_DENIED`` error is returned. This field must be unique among all [Document][google.cloud.discoveryengine.v1beta.Document]s with the same [parent][google.cloud.discoveryengine.v1beta.CreateDocumentRequest.parent]. - Otherwise, an ALREADY_EXISTS error is returned. + Otherwise, an ``ALREADY_EXISTS`` error is returned. This field must conform to `RFC-1034 `__ standard with a length limit of 63 characters. - Otherwise, an INVALID_ARGUMENT error is returned. + Otherwise, an ``INVALID_ARGUMENT`` error is returned. 
This corresponds to the ``document_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -642,11 +642,7 @@ async def sample_update_document(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.UpdateDocumentRequest( - document=document, ) # Make the request @@ -747,16 +743,17 @@ async def sample_delete_document(): Required. Full resource name of [Document][google.cloud.discoveryengine.v1beta.Document], such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. If the caller does not have permission to delete the [Document][google.cloud.discoveryengine.v1beta.Document], regardless of whether or not it exists, a - PERMISSION_DENIED error is returned. + ``PERMISSION_DENIED`` error is returned. If the [Document][google.cloud.discoveryengine.v1beta.Document] - to delete does not exist, a NOT_FOUND error is returned. + to delete does not exist, a ``NOT_FOUND`` error is + returned. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -839,11 +836,7 @@ async def sample_import_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) - inline_source = discoveryengine_v1beta.InlineSource() - inline_source.documents.schema_id = "schema_id_value" - request = discoveryengine_v1beta.ImportDocumentsRequest( - inline_source=inline_source, parent="parent_value", ) @@ -923,6 +916,123 @@ async def sample_import_documents(): # Done; return the response. return response + async def purge_documents( + self, + request: Optional[Union[purge_config.PurgeDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1beta.Document]s in a + branch. + + This process is asynchronous. Depending on the number of + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, this operation can take hours to complete. Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1beta.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1beta.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1beta.PurgeDocumentsRequest.force] + to false. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_purge_documents(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest, dict]]): + The request object. Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.PurgeDocumentsResponse` Response message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. If the long running operation is successfully + done, then this message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + request = purge_config.PurgeDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_documents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeDocumentsResponse, + metadata_type=purge_config.PurgeDocumentsMetadata, + ) + + # Done; return the response. 
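+        # The AsyncOperation's result() yields the PurgeDocumentsResponse
+        # once the purge finishes.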
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py index c19d4fe79c98..995b668f9278 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py @@ -52,7 +52,11 @@ from google.protobuf import struct_pb2 # type: ignore from google.cloud.discoveryengine_v1beta.services.document_service import pagers -from google.cloud.discoveryengine_v1beta.types import document_service, import_config +from google.cloud.discoveryengine_v1beta.types import ( + document_service, + import_config, + purge_config, +) from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -527,16 +531,16 @@ def sample_get_document(): Required. Full resource name of [Document][google.cloud.discoveryengine.v1beta.Document], such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. If the caller does not have permission to access the [Document][google.cloud.discoveryengine.v1beta.Document], regardless of whether or not it exists, a - PERMISSION_DENIED error is returned. + ``PERMISSION_DENIED`` error is returned. If the requested [Document][google.cloud.discoveryengine.v1beta.Document] - does not exist, a NOT_FOUND error is returned. + does not exist, a ``NOT_FOUND`` error is returned. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -642,14 +646,14 @@ def sample_list_documents(): method. parent (str): Required. The parent branch resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. Use ``default_branch`` as the branch ID, to list documents under the default branch. If the caller does not have permission to list [Documents][]s under this branch, regardless of whether - or not this branch exists, a PERMISSION_DENIED error is - returned. + or not this branch exists, a ``PERMISSION_DENIED`` error + is returned. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -751,12 +755,8 @@ def sample_create_document(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.CreateDocumentRequest( parent="parent_value", - document=document, document_id="document_id_value", ) @@ -773,7 +773,7 @@ def sample_create_document(): method. parent (str): Required. The parent resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -795,18 +795,18 @@ def sample_create_document(): If the caller does not have permission to create the [Document][google.cloud.discoveryengine.v1beta.Document], regardless of whether or not it exists, a - PERMISSION_DENIED error is returned. + ``PERMISSION_DENIED`` error is returned. This field must be unique among all [Document][google.cloud.discoveryengine.v1beta.Document]s with the same [parent][google.cloud.discoveryengine.v1beta.CreateDocumentRequest.parent]. - Otherwise, an ALREADY_EXISTS error is returned. + Otherwise, an ``ALREADY_EXISTS`` error is returned. This field must conform to `RFC-1034 `__ standard with a length limit of 63 characters. - Otherwise, an INVALID_ARGUMENT error is returned. + Otherwise, an ``INVALID_ARGUMENT`` error is returned. This corresponds to the ``document_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -897,11 +897,7 @@ def sample_update_document(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.UpdateDocumentRequest( - document=document, ) # Make the request @@ -1003,16 +999,17 @@ def sample_delete_document(): Required. Full resource name of [Document][google.cloud.discoveryengine.v1beta.Document], such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. If the caller does not have permission to delete the [Document][google.cloud.discoveryengine.v1beta.Document], regardless of whether or not it exists, a - PERMISSION_DENIED error is returned. + ``PERMISSION_DENIED`` error is returned. If the [Document][google.cloud.discoveryengine.v1beta.Document] - to delete does not exist, a NOT_FOUND error is returned. + to delete does not exist, a ``NOT_FOUND`` error is + returned. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1095,11 +1092,7 @@ def sample_import_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) - inline_source = discoveryengine_v1beta.InlineSource() - inline_source.documents.schema_id = "schema_id_value" - request = discoveryengine_v1beta.ImportDocumentsRequest( - inline_source=inline_source, parent="parent_value", ) @@ -1171,6 +1164,124 @@ def sample_import_documents(): # Done; return the response. return response + def purge_documents( + self, + request: Optional[Union[purge_config.PurgeDocumentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1beta.Document]s in a + branch. + + This process is asynchronous. Depending on the number of + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, this operation can take hours to complete. 
Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1beta.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1beta.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1beta.PurgeDocumentsRequest.force] + to false. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_purge_documents(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest, dict]): + The request object. Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.PurgeDocumentsResponse` Response message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. If the long running operation is successfully + done, then this message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a purge_config.PurgeDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, purge_config.PurgeDocumentsRequest): + request = purge_config.PurgeDocumentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
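A caller-side sketch of how the operation future assembled here is typically consumed, including the ``force=False`` dry run described in the docstring (resource names are placeholders, and the ``purge_count``/``purge_sample`` response fields are taken from the v1beta protos):

.. code-block:: python

    # Hypothetical caller code, not part of the generated client.
    from google.cloud import discoveryengine_v1beta

    client = discoveryengine_v1beta.DocumentServiceClient()

    request = discoveryengine_v1beta.PurgeDocumentsRequest(
        parent=(
            "projects/my-project/locations/global/collections/default_collection"
            "/dataStores/my-data-store/branches/default_branch"
        ),
        filter="*",   # "*" selects all documents under the branch
        force=False,  # dry run: report what would be purged, delete nothing
    )

    operation = client.purge_documents(request=request)
    response = operation.result()  # blocks until the long-running operation finishes

    print(response.purge_count)         # number of documents that would be purged
    print(list(response.purge_sample))  # sample of matching document names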
+ response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeDocumentsResponse, + metadata_type=purge_config.PurgeDocumentsMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py index d52736387fab..691ffec39777 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py @@ -27,7 +27,11 @@ from google.protobuf import empty_pb2 # type: ignore from google.cloud.discoveryengine_v1beta import gapic_version as package_version -from google.cloud.discoveryengine_v1beta.types import document_service, import_config +from google.cloud.discoveryengine_v1beta.types import ( + document_service, + import_config, + purge_config, +) from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -165,6 +169,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), + self.purge_documents: gapic_v1.method.wrap_method( + self.purge_documents, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -238,6 +247,15 @@ def import_documents( ]: raise NotImplementedError() + @property + def purge_documents( + self, + ) -> Callable[ + [purge_config.PurgeDocumentsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py index 145fee451034..9057a0a8de8e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py @@ -24,7 +24,11 @@ from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore -from google.cloud.discoveryengine_v1beta.types import document_service, import_config +from google.cloud.discoveryengine_v1beta.types import ( + document_service, + import_config, + purge_config, +) from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -418,6 +422,50 @@ def import_documents( ) return self._stubs["import_documents"] + @property + def purge_documents( + self, + ) -> Callable[[purge_config.PurgeDocumentsRequest], operations_pb2.Operation]: + r"""Return a callable for the purge documents method over gRPC. + + Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1beta.Document]s in a + branch. + + This process is asynchronous. Depending on the number of + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, this operation can take hours to complete. 
Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1beta.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1beta.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1beta.PurgeDocumentsRequest.force] + to false. + + Returns: + Callable[[~.PurgeDocumentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_documents" not in self._stubs: + self._stubs["purge_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/PurgeDocuments", + request_serializer=purge_config.PurgeDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_documents"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py index 0ff5d56a54a9..7240c544a2a6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py @@ -24,7 +24,11 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.discoveryengine_v1beta.types import document_service, import_config +from google.cloud.discoveryengine_v1beta.types import ( + document_service, + import_config, + purge_config, +) from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -430,6 +434,52 @@ def import_documents( ) return self._stubs["import_documents"] + @property + def purge_documents( + self, + ) -> Callable[ + [purge_config.PurgeDocumentsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the purge documents method over gRPC. + + Permanently deletes all selected + [Document][google.cloud.discoveryengine.v1beta.Document]s in a + branch. + + This process is asynchronous. Depending on the number of + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, this operation can take hours to complete. Before the + delete operation completes, some + [Document][google.cloud.discoveryengine.v1beta.Document]s might + still be returned by + [DocumentService.GetDocument][google.cloud.discoveryengine.v1beta.DocumentService.GetDocument] + or + [DocumentService.ListDocuments][google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments]. + + To get a list of the + [Document][google.cloud.discoveryengine.v1beta.Document]s to be + deleted, set + [PurgeDocumentsRequest.force][google.cloud.discoveryengine.v1beta.PurgeDocumentsRequest.force] + to false. 
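The gRPC transports above memoize one stub per RPC in ``self._stubs``; a stripped-down sketch of that lazy-caching pattern (class and attribute names here are illustrative, not the shipped transport):

.. code-block:: python

    from typing import Any, Callable, Dict

    class _StubCachingTransport:
        """Illustration of the per-RPC stub cache used by the generated transports."""

        def __init__(self, channel: Any) -> None:
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        @property
        def purge_documents(self) -> Callable:
            # Created once on first access, then reused; gRPC handles the
            # (de)serialization given the fully qualified method path.
            if "purge_documents" not in self._stubs:
                self._stubs["purge_documents"] = self._channel.unary_unary(
                    "/google.cloud.discoveryengine.v1beta.DocumentService/PurgeDocuments"
                )
            return self._stubs["purge_documents"]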
+ + Returns: + Callable[[~.PurgeDocumentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_documents" not in self._stubs: + self._stubs["purge_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/PurgeDocuments", + request_serializer=purge_config.PurgeDocumentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_documents"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py index 027d3fb7ed4f..66a90f312aa1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import ( @@ -46,7 +46,11 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore -from google.cloud.discoveryengine_v1beta.types import document_service, import_config +from google.cloud.discoveryengine_v1beta.types import ( + document_service, + import_config, + purge_config, +) from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -111,6 +115,14 @@ def post_list_documents(self, response): logging.log(f"Received response: {response}") return response + def pre_purge_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_documents(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -227,6 +239,29 @@ def post_list_documents( """ return response + def pre_purge_documents( + self, + request: purge_config.PurgeDocumentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[purge_config.PurgeDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_purge_documents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. 
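The ``pre_purge_documents``/``post_purge_documents`` hooks above follow the REST interceptor pattern this module documents; a hedged sketch of wiring a custom interceptor into the REST transport (the logging behaviour is illustrative):

.. code-block:: python

    import logging

    from google.cloud.discoveryengine_v1beta.services.document_service import (
        DocumentServiceClient,
    )
    from google.cloud.discoveryengine_v1beta.services.document_service.transports.rest import (
        DocumentServiceRestInterceptor,
        DocumentServiceRestTransport,
    )

    class LoggingInterceptor(DocumentServiceRestInterceptor):
        def pre_purge_documents(self, request, metadata):
            # Inspect or rewrite the request/metadata before it is sent.
            logging.info("Purging under parent: %s", request.parent)
            return request, metadata

        def post_purge_documents(self, response):
            # Observe the raw long-running operation before user code sees it.
            logging.info("Purge operation started: %s", response.name)
            return response

    transport = DocumentServiceRestTransport(interceptor=LoggingInterceptor())
    client = DocumentServiceClient(transport=transport)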
+ """ + return response + def pre_update_document( self, request: document_service.UpdateDocumentRequest, @@ -252,7 +287,7 @@ def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -261,7 +296,7 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.GetOperationRequest + self, response: operations_pb2.Operation ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -275,7 +310,7 @@ def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.ListOperationsResponse: + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -284,7 +319,7 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsRequest + self, response: operations_pb2.ListOperationsResponse ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -405,6 +440,26 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", @@ -427,6 +482,26 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", @@ -470,7 +545,7 @@ class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "documentId": "", } @@ -497,7 +572,6 @@ def __call__( The request object. 
Request message for [DocumentService.CreateDocument][google.cloud.discoveryengine.v1beta.DocumentService.CreateDocument] method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -518,6 +592,11 @@ def __call__( "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents", "body": "document", }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents", + "body": "document", + }, ] request, metadata = self._interceptor.pre_create_document(request, metadata) pb_request = document_service.CreateDocumentRequest.pb(request) @@ -573,7 +652,7 @@ class _DeleteDocument(DocumentServiceRestStub): def __hash__(self): return hash("DeleteDocument") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -598,7 +677,6 @@ def __call__( The request object. Request message for [DocumentService.DeleteDocument][google.cloud.discoveryengine.v1beta.DocumentService.DeleteDocument] method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -611,6 +689,10 @@ def __call__( "method": "delete", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}", }, + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}", + }, ] request, metadata = self._interceptor.pre_delete_document(request, metadata) pb_request = document_service.DeleteDocumentRequest.pb(request) @@ -650,7 +732,7 @@ class _GetDocument(DocumentServiceRestStub): def __hash__(self): return hash("GetDocument") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -675,7 +757,6 @@ def __call__( The request object. Request message for [DocumentService.GetDocument][google.cloud.discoveryengine.v1beta.DocumentService.GetDocument] method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
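A tiny self-contained illustration of the ``__REQUIRED_FIELDS_DEFAULT_VALUES`` mechanism these stubs share: required query parameters that were left unset are re-added with their proto defaults before the HTTP call (names here mirror the ``_CreateDocument`` stub; the path is a placeholder):

.. code-block:: python

    from typing import Any, Dict

    REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {"documentId": ""}

    def get_unset_required_fields(message_dict: Dict[str, Any]) -> Dict[str, Any]:
        # Same dict comprehension as the generated classmethod.
        return {
            k: v
            for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
            if k not in message_dict
        }

    query_params = {"parent": "projects/p/locations/l/dataStores/d/branches/b"}
    query_params.update(get_unset_required_fields(query_params))
    assert query_params["documentId"] == ""  # default restored for the wire format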
@@ -695,6 +776,10 @@ def __call__( "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}", }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}", + }, ] request, metadata = self._interceptor.pre_get_document(request, metadata) pb_request = document_service.GetDocumentRequest.pb(request) @@ -742,7 +827,7 @@ class _ImportDocuments(DocumentServiceRestStub): def __hash__(self): return hash("ImportDocuments") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -785,6 +870,11 @@ def __call__( "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents:import", "body": "*", }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents:import", + "body": "*", + }, ] request, metadata = self._interceptor.pre_import_documents( request, metadata @@ -840,7 +930,7 @@ class _ListDocuments(DocumentServiceRestStub): def __hash__(self): return hash("ListDocuments") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -865,7 +955,6 @@ def __call__( The request object. Request message for [DocumentService.ListDocuments][google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments] method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -885,6 +974,10 @@ def __call__( "method": "get", "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents", }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents", + }, ] request, metadata = self._interceptor.pre_list_documents(request, metadata) pb_request = document_service.ListDocumentsRequest.pb(request) @@ -928,11 +1021,114 @@ def __call__( resp = self._interceptor.post_list_documents(resp) return resp + class _PurgeDocuments(DocumentServiceRestStub): + def __hash__(self): + return hash("PurgeDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: purge_config.PurgeDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge documents method over HTTP. + + Args: + request (~.purge_config.PurgeDocumentsRequest): + The request object. Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
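Each ``__call__`` above funnels its request through ``google.api_core.path_template.transcode``, which picks the first HTTP rule whose URI template binds all the path fields; a small sketch of that step in isolation (the request values are placeholders):

.. code-block:: python

    from google.api_core import path_template

    http_options = [
        {
            "method": "post",
            "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents:purge",
            "body": "*",
        },
    ]

    transcoded = path_template.transcode(
        http_options,
        parent="projects/p/locations/global/dataStores/d/branches/default_branch",
        filter="*",
    )

    print(transcoded["method"])  # post
    print(transcoded["uri"])     # /v1beta/projects/p/locations/global/dataStores/d/branches/default_branch/documents:purge
    print(transcoded["body"])    # {'filter': '*'}  (everything not bound in the URI)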
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents:purge", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/documents:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_documents(request, metadata) + pb_request = purge_config.PurgeDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_documents(resp) + return resp + class _UpdateDocument(DocumentServiceRestStub): def __hash__(self): return hash("UpdateDocument") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -957,7 +1153,6 @@ def __call__( The request object. Request message for [DocumentService.UpdateDocument][google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument] method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -978,6 +1173,11 @@ def __call__( "uri": "/v1beta/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}", "body": "document", }, + { + "method": "patch", + "uri": "/v1beta/{document.name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}", + "body": "document", + }, ] request, metadata = self._interceptor.pre_update_document(request, metadata) pb_request = document_service.UpdateDocumentRequest.pb(request) @@ -1071,6 +1271,14 @@ def list_documents( # In C++ this would require a dynamic_cast return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore + @property + def purge_documents( + self, + ) -> Callable[[purge_config.PurgeDocumentsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeDocuments(self._session, self._host, self._interceptor) # type: ignore + @property def update_document( self, @@ -1109,6 +1317,26 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", @@ -1192,6 +1420,26 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py index 428ff9b17842..2c6a17661c40 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/async_client.py @@ -266,8 +266,7 @@ async def sample_recommend(): Args: request (Optional[Union[google.cloud.discoveryengine_v1beta.types.RecommendRequest, dict]]): - The request object. Request message for Recommend - method. + The request object. Request message for Recommend method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py index 12e5281d3c1e..35a50e4bc84e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py @@ -516,8 +516,7 @@ def sample_recommend(): Args: request (Union[google.cloud.discoveryengine_v1beta.types.RecommendRequest, dict]): - The request object. Request message for Recommend - method. + The request object. Request message for Recommend method. 
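The routes added in this file are only reachable when the client is built on the REST transport; selecting it is a one-liner, with gRPC remaining the default (a sketch, assuming application default credentials are available):

.. code-block:: python

    from google.cloud import discoveryengine_v1beta

    # HTTP/JSON via the rest.py transport patched above.
    rest_client = discoveryengine_v1beta.DocumentServiceClient(transport="rest")

    # Default behaviour: gRPC.
    grpc_client = discoveryengine_v1beta.DocumentServiceClient()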
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py index 643bf09a1f05..23889189f8be 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming @@ -105,7 +105,7 @@ def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -114,7 +114,7 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.GetOperationRequest + self, response: operations_pb2.Operation ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -128,7 +128,7 @@ def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.ListOperationsResponse: + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -137,7 +137,7 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsRequest + self, response: operations_pb2.ListOperationsResponse ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -248,7 +248,7 @@ class _Recommend(RecommendationServiceRestStub): def __hash__(self): return hash("Recommend") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -290,6 +290,11 @@ def __call__( "uri": "/v1beta/{serving_config=projects/*/locations/*/dataStores/*/servingConfigs/*}:recommend", "body": "*", }, + { + "method": "post", + "uri": "/v1beta/{serving_config=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}:recommend", + "body": "*", + }, ] request, metadata = self._interceptor.pre_recommend(request, metadata) pb_request = recommendation_service.RecommendRequest.pb(request) @@ -382,6 +387,26 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", @@ -465,6 +490,26 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/__init__.py new file mode 100644 index 000000000000..d61eb9a130a6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import SchemaServiceAsyncClient +from .client import SchemaServiceClient + +__all__ = ( + "SchemaServiceClient", + "SchemaServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py new file mode 100644 index 000000000000..110c0aa690c4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/async_client.py @@ -0,0 +1,944 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.schema_service import pagers +from google.cloud.discoveryengine_v1beta.types import schema +from google.cloud.discoveryengine_v1beta.types import schema as gcd_schema +from google.cloud.discoveryengine_v1beta.types import schema_service + +from .client import SchemaServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SchemaServiceTransport +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport + + +class SchemaServiceAsyncClient: + """Service for managing + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + """ + + _client: SchemaServiceClient + + DEFAULT_ENDPOINT = SchemaServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + + data_store_path = staticmethod(SchemaServiceClient.data_store_path) + parse_data_store_path = staticmethod(SchemaServiceClient.parse_data_store_path) + schema_path = staticmethod(SchemaServiceClient.schema_path) + parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path) + common_billing_account_path = staticmethod( + SchemaServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SchemaServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SchemaServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SchemaServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SchemaServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SchemaServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SchemaServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SchemaServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SchemaServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SchemaServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceAsyncClient: The constructed client. 
+ """ + return SchemaServiceClient.from_service_account_info.__func__(SchemaServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceAsyncClient: The constructed client. + """ + return SchemaServiceClient.from_service_account_file.__func__(SchemaServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SchemaServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SchemaServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SchemaServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the schema service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SchemaServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SchemaServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_schema( + self, + request: Optional[Union[schema_service.GetSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_get_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.GetSchemaRequest, dict]]): + The request object. Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1beta.SchemaService.GetSchema] + method. + name (:class:`str`): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Schema: + Defines the structure and layout of a + type of document data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
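The ``has_flattened_params`` guard that follows enforces a simple contract: callers pass either a complete request object or the flattened convenience arguments, never both. A sketch against the synchronous client (names are placeholders):

.. code-block:: python

    from google.cloud import discoveryengine_v1beta

    client = discoveryengine_v1beta.SchemaServiceClient()
    name = (
        "projects/my-project/locations/global/collections/default_collection"
        "/dataStores/my-data-store/schemas/default_schema"
    )

    schema = client.get_schema(name=name)               # flattened argument: OK
    schema = client.get_schema(request={"name": name})  # request object: OK

    try:
        client.get_schema(request={"name": name}, name=name)  # both: rejected
    except ValueError as exc:
        print(exc)  # "If the `request` argument is set, then none of ..."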
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema_service.GetSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_schemas( + self, + request: Optional[Union[schema_service.ListSchemasRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasAsyncPager: + r"""Gets a list of + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_list_schemas(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ListSchemasRequest, dict]]): + The request object. Request message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. + parent (:class:`str`): + Required. The parent data store resource name, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasAsyncPager: + Response message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema_service.ListSchemasRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_schemas, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSchemasAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_schema( + self, + request: Optional[Union[schema_service.CreateSchemaRequest, dict]] = None, + *, + parent: Optional[str] = None, + schema: Optional[gcd_schema.Schema] = None, + schema_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_create_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Make the request + operation = client.create_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest, dict]]): + The request object. Request message for + [SchemaService.CreateSchema][google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema] + method. + parent (:class:`str`): + Required. The parent data store resource name, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`google.cloud.discoveryengine_v1beta.types.Schema`): + Required. 
The + [Schema][google.cloud.discoveryengine.v1beta.Schema] to + create. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (:class:`str`): + Required. The ID to use for the + [Schema][google.cloud.discoveryengine.v1beta.Schema], + which will become the final component of the + [Schema.name][google.cloud.discoveryengine.v1beta.Schema.name]. + + This field should conform to + `RFC-1034 `__ + standard with a length limit of 63 characters. + + This corresponds to the ``schema_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.Schema` + Defines the structure and layout of a type of document + data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema, schema_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema_service.CreateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + if schema_id is not None: + request.schema_id = schema_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_schema.Schema, + metadata_type=schema_service.CreateSchemaMetadata, + ) + + # Done; return the response. + return response + + async def update_schema( + self, + request: Optional[Union[schema_service.UpdateSchemaRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_update_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.UpdateSchemaRequest( + ) + + # Make the request + operation = client.update_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest, dict]]): + The request object. Request message for + [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.Schema` + Defines the structure and layout of a type of document + data. + + """ + # Create or coerce a protobuf request object. + request = schema_service.UpdateSchemaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("schema.name", request.schema.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + schema.Schema, + metadata_type=schema_service.UpdateSchemaMetadata, + ) + + # Done; return the response. + return response + + async def delete_schema( + self, + request: Optional[Union[schema_service.DeleteSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_delete_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest, dict]]): + The request object. Request message for + [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema] + method. + name (:class:`str`): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema_service.DeleteSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=schema_service.DeleteSchemaMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SchemaServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py new file mode 100644 index 000000000000..1f40538d30c1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py @@ -0,0 +1,1201 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.schema_service import pagers +from google.cloud.discoveryengine_v1beta.types import schema +from google.cloud.discoveryengine_v1beta.types import schema as gcd_schema +from google.cloud.discoveryengine_v1beta.types import schema_service + +from .transports.base import DEFAULT_CLIENT_INFO, SchemaServiceTransport +from .transports.grpc import SchemaServiceGrpcTransport +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .transports.rest import SchemaServiceRestTransport + + +class SchemaServiceClientMeta(type): + """Metaclass for the SchemaService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] + _transport_registry["grpc"] = SchemaServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SchemaServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SchemaServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SchemaServiceClient(metaclass=SchemaServiceClientMeta): + """Service for managing + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SchemaServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SchemaServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def schema_path( + project: str, + location: str, + data_store: str, + schema: str, + ) -> str: + """Returns a fully-qualified schema string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/schemas/{schema}".format( + project=project, + location=location, + data_store=data_store, + schema=schema, + ) + + @staticmethod + def parse_schema_path(path: str) -> Dict[str, str]: + """Parses a schema path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/schemas/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: 
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Figure out the client cert source to use.
+ client_cert_source = None
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ client_cert_source = client_options.client_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = cls.DEFAULT_ENDPOINT
+
+ return api_endpoint, client_cert_source
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Optional[Union[str, SchemaServiceTransport]] = None,
+ client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the schema service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, SchemaServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SchemaServiceTransport): + # transport is a SchemaServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_schema( + self, + request: Optional[Union[schema_service.GetSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_get_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.GetSchemaRequest, dict]): + The request object. Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1beta.SchemaService.GetSchema] + method. + name (str): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Schema: + Defines the structure and layout of a + type of document data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.GetSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
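+ # For illustration only (the resource name below is hypothetical), the
+ # coercion above makes the dict form and the flattened form equivalent:
+ #   client.get_schema(request={"name": "projects/p/locations/global/dataStores/d/schemas/s"})
+ #   client.get_schema(name="projects/p/locations/global/dataStores/d/schemas/s")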
+ if not isinstance(request, schema_service.GetSchemaRequest): + request = schema_service.GetSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_schemas( + self, + request: Optional[Union[schema_service.ListSchemasRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasPager: + r"""Gets a list of + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_list_schemas(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ListSchemasRequest, dict]): + The request object. Request message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. + parent (str): + Required. The parent data store resource name, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasPager: + Response message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
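+ # For example, calling list_schemas(request=request, parent="...") with
+ # both arguments set trips this check and raises ValueError, since
+ # `parent` duplicates a field already carried by the request object.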
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.ListSchemasRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema_service.ListSchemasRequest): + request = schema_service.ListSchemasRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_schemas] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchemasPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_schema( + self, + request: Optional[Union[schema_service.CreateSchemaRequest, dict]] = None, + *, + parent: Optional[str] = None, + schema: Optional[gcd_schema.Schema] = None, + schema_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_create_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Make the request + operation = client.create_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest, dict]): + The request object. Request message for + [SchemaService.CreateSchema][google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema] + method. + parent (str): + Required. The parent data store resource name, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.cloud.discoveryengine_v1beta.types.Schema): + Required. 
The
+ [Schema][google.cloud.discoveryengine.v1beta.Schema] to
+ create.
+
+ This corresponds to the ``schema`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ schema_id (str):
+ Required. The ID to use for the
+ [Schema][google.cloud.discoveryengine.v1beta.Schema],
+ which will become the final component of the
+ [Schema.name][google.cloud.discoveryengine.v1beta.Schema.name].
+
+ This field should conform to
+ `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__
+ standard with a length limit of 63 characters.
+
+ This corresponds to the ``schema_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.discoveryengine_v1beta.types.Schema`
+ Defines the structure and layout of a type of document
+ data.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, schema, schema_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a schema_service.CreateSchemaRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, schema_service.CreateSchemaRequest):
+ request = schema_service.CreateSchemaRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if schema is not None:
+ request.schema = schema
+ if schema_id is not None:
+ request.schema_id = schema_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_schema]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ gcd_schema.Schema,
+ metadata_type=schema_service.CreateSchemaMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def update_schema(
+ self,
+ request: Optional[Union[schema_service.UpdateSchemaRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_update_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.UpdateSchemaRequest( + ) + + # Make the request + operation = client.update_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest, dict]): + The request object. Request message for + [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.Schema` + Defines the structure and layout of a type of document + data. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.UpdateSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema_service.UpdateSchemaRequest): + request = schema_service.UpdateSchemaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("schema.name", request.schema.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + schema.Schema, + metadata_type=schema_service.UpdateSchemaMetadata, + ) + + # Done; return the response. + return response + + def delete_schema( + self, + request: Optional[Union[schema_service.DeleteSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_delete_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest, dict]): + The request object. Request message for + [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema] + method. + name (str): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema_service.DeleteSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema_service.DeleteSchemaRequest): + request = schema_service.DeleteSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
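+ # The future's eventual result is empty_pb2.Empty; a typical caller
+ # blocks on completion with `operation.result()`, as in the sample above.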
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=schema_service.DeleteSchemaMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SchemaServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
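+ # Note that this operations mixin wraps the method ad hoc here, whereas
+ # the schema RPCs above reuse self._transport._wrapped_methods.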
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SchemaServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/pagers.py new file mode 100644 index 000000000000..76978773dcbf --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1beta.types import schema, schema_service + + +class ListSchemasPager: + """A pager for iterating through ``list_schemas`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListSchemasResponse` object, and + provides an ``__iter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSchemas`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListSchemasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., schema_service.ListSchemasResponse], + request: schema_service.ListSchemasRequest, + response: schema_service.ListSchemasResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListSchemasRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListSchemasResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = schema_service.ListSchemasRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[schema_service.ListSchemasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[schema.Schema]: + for page in self.pages: + yield from page.schemas + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSchemasAsyncPager: + """A pager for iterating through ``list_schemas`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListSchemasResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSchemas`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListSchemasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[schema_service.ListSchemasResponse]], + request: schema_service.ListSchemasRequest, + response: schema_service.ListSchemasResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListSchemasRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListSchemasResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = schema_service.ListSchemasRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[schema_service.ListSchemasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[schema.Schema]: + async def async_generator(): + async for page in self.pages: + for response in page.schemas: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/__init__.py new file mode 100644 index 000000000000..4948bcebf0a6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SchemaServiceTransport +from .grpc import SchemaServiceGrpcTransport +from .grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .rest import SchemaServiceRestInterceptor, SchemaServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] +_transport_registry["grpc"] = SchemaServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SchemaServiceRestTransport + +__all__ = ( + "SchemaServiceTransport", + "SchemaServiceGrpcTransport", + "SchemaServiceGrpcAsyncIOTransport", + "SchemaServiceRestTransport", + "SchemaServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/base.py new file mode 100644 index 000000000000..1c26be0786f4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/base.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version +from google.cloud.discoveryengine_v1beta.types import schema, schema_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SchemaServiceTransport(abc.ABC): + """Abstract transport class for SchemaService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
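+        # The logic below loads credentials from ``credentials_file`` when one
+        # is given, falls back to Application Default Credentials when neither
+        # argument is given, and rejects the ambiguous case where both are
+        # supplied.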
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if the credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.get_schema: gapic_v1.method.wrap_method(
+                self.get_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list_schemas: gapic_v1.method.wrap_method(
+                self.list_schemas,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.create_schema: gapic_v1.method.wrap_method(
+                self.create_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.update_schema: gapic_v1.method.wrap_method(
+                self.update_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.delete_schema: gapic_v1.method.wrap_method(
+                self.delete_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
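+
+        A hedged sketch of the usual ownership pattern, in which the
+        generated client owns the transport and closes it when done::
+
+            client = SchemaServiceClient()
+            try:
+                ...  # make calls
+            finally:
+                client.transport.close()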
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_schema( + self, + ) -> Callable[ + [schema_service.GetSchemaRequest], + Union[schema.Schema, Awaitable[schema.Schema]], + ]: + raise NotImplementedError() + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], + Union[ + schema_service.ListSchemasResponse, + Awaitable[schema_service.ListSchemasResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_schema( + self, + ) -> Callable[ + [schema_service.CreateSchemaRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_schema( + self, + ) -> Callable[ + [schema_service.UpdateSchemaRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_schema( + self, + ) -> Callable[ + [schema_service.DeleteSchemaRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SchemaServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc.py new file mode 100644 index 000000000000..74695a45e1a6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc.py @@ -0,0 +1,423 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1beta.types import schema, schema_service + +from .base import DEFAULT_CLIENT_INFO, SchemaServiceTransport + + +class SchemaServiceGrpcTransport(SchemaServiceTransport): + """gRPC backend transport for SchemaService. 
+
+    Service for managing
+    [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
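+
+        For illustration only (``creds`` stands in for an existing
+        :class:`google.auth.credentials.Credentials` instance)::
+
+            channel = SchemaServiceGrpcTransport.create_channel(
+                "discoveryengine.googleapis.com",
+                credentials=creds,
+            )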
+ Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def get_schema(self) -> Callable[[schema_service.GetSchemaRequest], schema.Schema]: + r"""Return a callable for the get schema method over gRPC. + + Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.GetSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/GetSchema", + request_serializer=schema_service.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], schema_service.ListSchemasResponse + ]: + r"""Return a callable for the list schemas method over gRPC. + + Gets a list of + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + + Returns: + Callable[[~.ListSchemasRequest], + ~.ListSchemasResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/ListSchemas", + request_serializer=schema_service.ListSchemasRequest.serialize, + response_deserializer=schema_service.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def create_schema( + self, + ) -> Callable[[schema_service.CreateSchemaRequest], operations_pb2.Operation]: + r"""Return a callable for the create schema method over gRPC. + + Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.CreateSchemaRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
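+        # The callable is cached in ``self._stubs``, so repeated property
+        # access reuses one stub per channel instead of re-creating it on
+        # every call.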
+ if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/CreateSchema", + request_serializer=schema_service.CreateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_schema"] + + @property + def update_schema( + self, + ) -> Callable[[schema_service.UpdateSchemaRequest], operations_pb2.Operation]: + r"""Return a callable for the update schema method over gRPC. + + Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.UpdateSchemaRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_schema" not in self._stubs: + self._stubs["update_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/UpdateSchema", + request_serializer=schema_service.UpdateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_schema"] + + @property + def delete_schema( + self, + ) -> Callable[[schema_service.DeleteSchemaRequest], operations_pb2.Operation]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.DeleteSchemaRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/DeleteSchema", + request_serializer=schema_service.DeleteSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_schema"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SchemaServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a1a06e800526 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/grpc_asyncio.py @@ -0,0 +1,433 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1beta.types import schema, schema_service + +from .base import DEFAULT_CLIENT_INFO, SchemaServiceTransport +from .grpc import SchemaServiceGrpcTransport + + +class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): + """gRPC AsyncIO backend transport for SchemaService. + + Service for managing + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_schema( + self, + ) -> Callable[[schema_service.GetSchemaRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the get schema method over gRPC. + + Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.GetSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/GetSchema", + request_serializer=schema_service.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], + Awaitable[schema_service.ListSchemasResponse], + ]: + r"""Return a callable for the list schemas method over gRPC. + + Gets a list of + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + + Returns: + Callable[[~.ListSchemasRequest], + Awaitable[~.ListSchemasResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/ListSchemas", + request_serializer=schema_service.ListSchemasRequest.serialize, + response_deserializer=schema_service.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def create_schema( + self, + ) -> Callable[ + [schema_service.CreateSchemaRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create schema method over gRPC. + + Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.CreateSchemaRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/CreateSchema", + request_serializer=schema_service.CreateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_schema"] + + @property + def update_schema( + self, + ) -> Callable[ + [schema_service.UpdateSchemaRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update schema method over gRPC. + + Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.UpdateSchemaRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
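+
+        A hedged sketch (``transport`` is assumed to be an instance of
+        this class and ``request`` a prepared ``UpdateSchemaRequest``)::
+
+            rpc = transport.update_schema
+            operation = await rpc(request)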
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_schema" not in self._stubs: + self._stubs["update_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/UpdateSchema", + request_serializer=schema_service.UpdateSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_schema"] + + @property + def delete_schema( + self, + ) -> Callable[ + [schema_service.DeleteSchemaRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema]. + + Returns: + Callable[[~.DeleteSchemaRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SchemaService/DeleteSchema", + request_serializer=schema_service.DeleteSchemaRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_schema"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SchemaServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py new file mode 100644 index 000000000000..9d4ef51d3413 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py @@ -0,0 +1,1235 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import schema, schema_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SchemaServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SchemaServiceRestInterceptor: + """Interceptor for SchemaService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SchemaServiceRestTransport. + + .. 
code-block:: python + class MyCustomSchemaServiceInterceptor(SchemaServiceRestInterceptor): + def pre_create_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_schemas(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_schemas(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_schema(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SchemaServiceRestTransport(interceptor=MyCustomSchemaServiceInterceptor()) + client = SchemaServiceClient(transport=transport) + + + """ + + def pre_create_schema( + self, + request: schema_service.CreateSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.CreateSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_create_schema( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_delete_schema( + self, + request: schema_service.DeleteSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.DeleteSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_delete_schema( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_get_schema( + self, + request: schema_service.GetSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.GetSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_get_schema(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for get_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_schemas( + self, + request: schema_service.ListSchemasRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.ListSchemasRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_schemas + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_schemas( + self, response: schema_service.ListSchemasResponse + ) -> schema_service.ListSchemasResponse: + """Post-rpc interceptor for list_schemas + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_update_schema( + self, + request: schema_service.UpdateSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema_service.UpdateSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_update_schema( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SchemaServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SchemaServiceRestInterceptor + + +class SchemaServiceRestTransport(SchemaServiceTransport): + """REST backend transport for SchemaService. + + Service for managing + [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "discoveryengine.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[SchemaServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SchemaServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
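+        # The ``http_options`` mapping built below pins each
+        # google.longrunning method to the v1beta REST paths it may be served
+        # from; ``operations_v1`` uses it to transcode operation polling calls
+        # into plain HTTP GETs.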
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("CreateSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "schemaId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.CreateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create schema method over HTTP. + + Args: + request (~.schema_service.CreateSchemaRequest): + The request object. 
Request message for + [SchemaService.CreateSchema][google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/schemas", + "body": "schema", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas", + "body": "schema", + }, + ] + request, metadata = self._interceptor.pre_create_schema(request, metadata) + pb_request = schema_service.CreateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_schema(resp) + return resp + + class _DeleteSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("DeleteSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.DeleteSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete schema method over HTTP. + + Args: + request (~.schema_service.DeleteSchemaRequest): + The request object. Request message for + [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/schemas/*}", + }, + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_schema(request, metadata) + pb_request = schema_service.DeleteSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_schema(resp) + return resp + + class _GetSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("GetSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.GetSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Call the get schema method over HTTP. + + Args: + request (~.schema_service.GetSchemaRequest): + The request object. Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1beta.SchemaService.GetSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.Schema: + Defines the structure and layout of a + type of document data. 
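Every stub in this transport funnels non-2xx responses through `core_exceptions.from_http_response`, so REST callers get the same typed exceptions as gRPC callers. The underlying status-code mapping can be seen with `from_http_status` (the message text here is made up):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    exc = core_exceptions.from_http_status(
        404, "GET .../schemas/my-schema: schema not found"
    )
    print(type(exc).__name__)  # NotFound
    print(exc.code)            # 404

    try:
        raise exc
    except core_exceptions.GoogleAPICallError as e:
        print(e.message)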
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/schemas/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}", + }, + ] + request, metadata = self._interceptor.pre_get_schema(request, metadata) + pb_request = schema_service.GetSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_schema(resp) + return resp + + class _ListSchemas(SchemaServiceRestStub): + def __hash__(self): + return hash("ListSchemas") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.ListSchemasRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema_service.ListSchemasResponse: + r"""Call the list schemas method over HTTP. + + Args: + request (~.schema_service.ListSchemasRequest): + The request object. Request message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema_service.ListSchemasResponse: + Response message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. 
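Note the decode pattern in `_GetSchema` above: `schema.Schema.pb(resp)` returns the raw protobuf message *backing* the proto-plus wrapper rather than a copy, so parsing JSON into `pb_resp` populates `resp` in place. A sketch of that aliasing (the schema name is made up):

.. code-block:: python

    from google.protobuf import json_format

    from google.cloud.discoveryengine_v1beta.types import Schema

    resp = Schema()
    pb_resp = Schema.pb(resp)  # the underlying protobuf message, not a copy

    json_format.Parse(
        '{"name": "projects/p/locations/global/dataStores/ds/schemas/s"}',
        pb_resp,
        ignore_unknown_fields=True,
    )
    print(resp.name)  # the proto-plus wrapper reflects the parsed payload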
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/schemas", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas", + }, + ] + request, metadata = self._interceptor.pre_list_schemas(request, metadata) + pb_request = schema_service.ListSchemasRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema_service.ListSchemasResponse() + pb_resp = schema_service.ListSchemasResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_schemas(resp) + return resp + + class _UpdateSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("UpdateSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema_service.UpdateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update schema method over HTTP. + + Args: + request (~.schema_service.UpdateSchemaRequest): + The request object. Request message for + [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
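The `_UpdateSchema` stub here binds its URI to a *nested* field, `{schema.name=...}`, so the resource path is read out of the request payload itself; `path_template.transcode` resolves dotted field paths when matching. A sketch with a made-up schema name:

.. code-block:: python

    from google.api_core import path_template

    http_options = [
        {
            "method": "patch",
            "uri": "/v1beta/{schema.name=projects/*/locations/*/dataStores/*/schemas/*}",
            "body": "schema",
        },
    ]

    transcoded = path_template.transcode(
        http_options,
        schema={"name": "projects/p/locations/global/dataStores/ds/schemas/s"},
    )
    print(transcoded["method"])  # patch
    print(transcoded["uri"])
    # /v1beta/projects/p/locations/global/dataStores/ds/schemas/s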
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{schema.name=projects/*/locations/*/dataStores/*/schemas/*}", + "body": "schema", + }, + { + "method": "patch", + "uri": "/v1beta/{schema.name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}", + "body": "schema", + }, + ] + request, metadata = self._interceptor.pre_update_schema(request, metadata) + pb_request = schema_service.UpdateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_schema(resp) + return resp + + @property + def create_schema( + self, + ) -> Callable[[schema_service.CreateSchemaRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_schema( + self, + ) -> Callable[[schema_service.DeleteSchemaRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_schema(self) -> Callable[[schema_service.GetSchemaRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_schemas( + self, + ) -> Callable[ + [schema_service.ListSchemasRequest], schema_service.ListSchemasResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSchemas(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_schema( + self, + ) -> Callable[[schema_service.UpdateSchemaRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SchemaServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SchemaServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/__init__.py new file mode 100644 index 000000000000..be448b1d6b5a --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import SearchServiceAsyncClient +from .client import SearchServiceClient + +__all__ = ( + "SearchServiceClient", + "SearchServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py new file mode 100644 index 000000000000..f6d1beb1d01e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/async_client.py @@ -0,0 +1,442 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
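With `kind` reporting ``"rest"`` and the stubs above in place, the finished transport can be selected by name on the public client, and the `GetOperation`/`ListOperations` mixins are reachable the same way. A short sketch (the operation name is made up):

.. code-block:: python

    from google.cloud import discoveryengine_v1beta
    from google.longrunning import operations_pb2

    client = discoveryengine_v1beta.SchemaServiceClient(transport="rest")
    print(client.transport.kind)  # rest

    op = client.get_operation(
        operations_pb2.GetOperationRequest(
            name="projects/p/locations/global/operations/op-123"
        )
    )
    print(op.done)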
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1beta.services.search_service import pagers +from google.cloud.discoveryengine_v1beta.types import search_service + +from .client import SearchServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SearchServiceTransport +from .transports.grpc_asyncio import SearchServiceGrpcAsyncIOTransport + + +class SearchServiceAsyncClient: + """Service for search.""" + + _client: SearchServiceClient + + DEFAULT_ENDPOINT = SearchServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SearchServiceClient.DEFAULT_MTLS_ENDPOINT + + branch_path = staticmethod(SearchServiceClient.branch_path) + parse_branch_path = staticmethod(SearchServiceClient.parse_branch_path) + document_path = staticmethod(SearchServiceClient.document_path) + parse_document_path = staticmethod(SearchServiceClient.parse_document_path) + serving_config_path = staticmethod(SearchServiceClient.serving_config_path) + parse_serving_config_path = staticmethod( + SearchServiceClient.parse_serving_config_path + ) + common_billing_account_path = staticmethod( + SearchServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SearchServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SearchServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SearchServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SearchServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SearchServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SearchServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SearchServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SearchServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SearchServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchServiceAsyncClient: The constructed client. + """ + return SearchServiceClient.from_service_account_info.__func__(SearchServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SearchServiceAsyncClient: The constructed client.
+        """
+        return SearchServiceClient.from_service_account_file.__func__(SearchServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return SearchServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
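The endpoint/cert resolution above is a pure function of environment variables and client options, so it can be inspected without any credentials. A sketch:

.. code-block:: python

    import os

    from google.cloud import discoveryengine_v1beta

    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    endpoint, cert_source = (
        discoveryengine_v1beta.SearchServiceAsyncClient.get_mtls_endpoint_and_cert_source()
    )
    print(endpoint)     # discoveryengine.mtls.googleapis.com
    print(cert_source)  # None unless GOOGLE_API_USE_CLIENT_CERTIFICATE is "true"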
+
+    @property
+    def transport(self) -> SearchServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SearchServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(SearchServiceClient).get_transport_class, type(SearchServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, SearchServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the search service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.SearchServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = SearchServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def search(
+        self,
+        request: Optional[Union[search_service.SearchRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.SearchAsyncPager:
+        r"""Performs a search.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import discoveryengine_v1beta
+
+            async def sample_search():
+                # Create a client
+                client = discoveryengine_v1beta.SearchServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = discoveryengine_v1beta.SearchRequest(
+                    serving_config="serving_config_value",
+                )
+
+                # Make the request
+                page_result = await client.search(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.discoveryengine_v1beta.types.SearchRequest, dict]]):
+                The request object. Request message for
+                [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search]
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchAsyncPager:
+                Response message for
+                [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search]
+                method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = search_service.SearchRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.search,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
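As a usage sketch of the method above (the serving-config name and query are made up), the async client composes with `asyncio` in the obvious way:

.. code-block:: python

    import asyncio

    from google.cloud import discoveryengine_v1beta


    async def main():
        client = discoveryengine_v1beta.SearchServiceAsyncClient()
        request = discoveryengine_v1beta.SearchRequest(
            serving_config=(
                "projects/p/locations/global/dataStores/ds"
                "/servingConfigs/default_config"
            ),
            query="wireless headphones",
        )
        pager = await client.search(request=request)
        async for result in pager:  # SearchAsyncPager fetches pages lazily
            print(result.id)


    asyncio.run(main())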
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config", request.serving_config),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py new file mode 100644 index 000000000000..c6792c4eaa72 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py @@ -0,0 +1,723 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 + +from google.cloud.discoveryengine_v1beta.services.search_service import pagers +from google.cloud.discoveryengine_v1beta.types import search_service + +from .transports.base import DEFAULT_CLIENT_INFO, SearchServiceTransport +from .transports.grpc import SearchServiceGrpcTransport +from .transports.grpc_asyncio import SearchServiceGrpcAsyncIOTransport +from .transports.rest import SearchServiceRestTransport + + +class SearchServiceClientMeta(type): + """Metaclass for the SearchService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
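The transport registry that this metaclass populates just below is what a `transport="rest"` string dispatches through. A quick sketch of the lookup:

.. code-block:: python

    from google.cloud.discoveryengine_v1beta.services.search_service import (
        SearchServiceClient,
    )

    transport_cls = SearchServiceClient.get_transport_class("rest")
    print(transport_cls.__name__)  # SearchServiceRestTransport

    # Omitting the label falls back to the first registered transport (gRPC).
    print(SearchServiceClient.get_transport_class().__name__)
    # SearchServiceGrpcTransport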
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[SearchServiceTransport]]
+    _transport_registry["grpc"] = SearchServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = SearchServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = SearchServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[SearchServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class SearchServiceClient(metaclass=SearchServiceClientMeta):
+    """Service for search."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "discoveryengine.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SearchServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SearchServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> SearchServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SearchServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def branch_path(
+        project: str,
+        location: str,
+        data_store: str,
+        branch: str,
+    ) -> str:
+        """Returns a fully-qualified branch string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            branch=branch,
+        )
+
+    @staticmethod
+    def parse_branch_path(path: str) -> Dict[str, str]:
+        """Parses a branch path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/branches/(?P<branch>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def document_path(
+        project: str,
+        location: str,
+        data_store: str,
+        branch: str,
+        document: str,
+    ) -> str:
+        """Returns a fully-qualified document string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            branch=branch,
+            document=document,
+        )
+
+    @staticmethod
+    def parse_document_path(path: str) -> Dict[str, str]:
+        """Parses a document path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/branches/(?P<branch>.+?)/documents/(?P<document>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def serving_config_path(
+        project: str,
+        location: str,
+        data_store: str,
+        serving_config: str,
+    ) -> str:
+        """Returns a fully-qualified serving_config string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            serving_config=serving_config,
+        )
+
+    @staticmethod
+    def parse_serving_config_path(path: str) -> Dict[str, str]:
+        """Parses a serving_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/servingConfigs/(?P<serving_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
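Each `parse_*` helper is the inverse of its path builder, with the regex named groups recovering the individual resource segments. A roundtrip sketch:

.. code-block:: python

    from google.cloud import discoveryengine_v1beta

    client_cls = discoveryengine_v1beta.SearchServiceClient

    path = client_cls.branch_path("p", "global", "ds", "default_branch")
    print(path)
    # projects/p/locations/global/dataStores/ds/branches/default_branch

    print(client_cls.parse_branch_path(path))
    # {'project': 'p', 'location': 'global', 'data_store': 'ds',
    #  'branch': 'default_branch'}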
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SearchServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SearchServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SearchServiceTransport): + # transport is a SearchServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def search( + self, + request: Optional[Union[search_service.SearchRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchPager: + r"""Performs a search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_search(): + # Create a client + client = discoveryengine_v1beta.SearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.SearchRequest( + serving_config="serving_config_value", + ) + + # Make the request + page_result = client.search(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.SearchRequest, dict]): + The request object. Request message for + [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchPager: + Response message for + [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a search_service.SearchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, search_service.SearchRequest): + request = search_service.SearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.search] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config", request.serving_config),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SearchServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
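Because `__exit__` closes the transport, the sync client works as a context manager, and iterating the returned pager resolves additional pages automatically. A typical synchronous flow (resource names and query are made up):

.. code-block:: python

    from google.cloud import discoveryengine_v1beta

    request = discoveryengine_v1beta.SearchRequest(
        serving_config=(
            "projects/p/locations/global/dataStores/ds"
            "/servingConfigs/default_config"
        ),
        query="wireless headphones",
    )

    # The with-block closes the underlying transport on exit, so do not
    # share this client instance with other code.
    with discoveryengine_v1beta.SearchServiceClient() as client:
        for result in client.search(request=request):
            print(result.id)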
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/pagers.py new file mode 100644 index 000000000000..734f1ffc423f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1beta.types import search_service + + +class SearchPager: + """A pager for iterating through ``search`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.SearchResponse` object, and + provides an ``__iter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``Search`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.SearchResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., search_service.SearchResponse], + request: search_service.SearchRequest, + response: search_service.SearchResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.SearchRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.SearchResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = search_service.SearchRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[search_service.SearchResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[search_service.SearchResponse.SearchResult]: + for page in self.pages: + yield from page.results + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAsyncPager: + """A pager for iterating through ``search`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.SearchResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``Search`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.SearchResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[search_service.SearchResponse]], + request: search_service.SearchRequest, + response: search_service.SearchResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.SearchRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.SearchResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = search_service.SearchRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[search_service.SearchResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[search_service.SearchResponse.SearchResult]: + async def async_generator(): + async for page in self.pages: + for response in page.results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/__init__.py new file mode 100644 index 000000000000..5882bd2969d8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SearchServiceTransport +from .grpc import SearchServiceGrpcTransport +from .grpc_asyncio import SearchServiceGrpcAsyncIOTransport +from .rest import SearchServiceRestInterceptor, SearchServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SearchServiceTransport]] +_transport_registry["grpc"] = SearchServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SearchServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SearchServiceRestTransport + +__all__ = ( + "SearchServiceTransport", + "SearchServiceGrpcTransport", + "SearchServiceGrpcAsyncIOTransport", + "SearchServiceRestTransport", + "SearchServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/base.py new file mode 100644 index 000000000000..0e583bfc842e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/base.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version +from google.cloud.discoveryengine_v1beta.types import search_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SearchServiceTransport(abc.ABC): + """Abstract transport class for SearchService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
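+        # The precedence implemented below: an explicit ``credentials`` object + # wins, then ``credentials_file``, and finally application default + # credentials discovered from the environment via ``google.auth.default``.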
+        if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file is passed from the user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.search: gapic_v1.method.wrap_method( + self.search, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def search( + self, + ) -> Callable[ + [search_service.SearchRequest], + Union[search_service.SearchResponse, Awaitable[search_service.SearchResponse]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SearchServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc.py new file mode 100644 index 000000000000..69bb24a3b513 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 +import grpc # type: ignore + +from google.cloud.discoveryengine_v1beta.types import search_service + +from .base import DEFAULT_CLIENT_INFO, SearchServiceTransport + + +class SearchServiceGrpcTransport(SearchServiceTransport): + """gRPC backend transport for SearchService. + + Service for search. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def search( + self, + ) -> Callable[[search_service.SearchRequest], search_service.SearchResponse]: + r"""Return a callable for the search method over gRPC. + + Performs a search. + + Returns: + Callable[[~.SearchRequest], + ~.SearchResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search" not in self._stubs: + self._stubs["search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SearchService/Search", + request_serializer=search_service.SearchRequest.serialize, + response_deserializer=search_service.SearchResponse.deserialize, + ) + return self._stubs["search"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
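+        # The created stub is cached in ``self._stubs``, so repeated accesses of + # this property reuse the same bound callable rather than re-creating it.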
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SearchServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..1553ceb9c2e6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/grpc_asyncio.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1beta.types import search_service + +from .base import DEFAULT_CLIENT_INFO, SearchServiceTransport +from .grpc import SearchServiceGrpcTransport + + +class SearchServiceGrpcAsyncIOTransport(SearchServiceTransport): + """gRPC AsyncIO backend transport for SearchService. + + Service for search. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def search( + self, + ) -> Callable[ + [search_service.SearchRequest], Awaitable[search_service.SearchResponse] + ]: + r"""Return a callable for the search method over gRPC. + + Performs a search. + + Returns: + Callable[[~.SearchRequest], + Awaitable[~.SearchResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
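+        # Same stub-caching pattern as the sync transport, except the callable + # returned by the asyncio channel yields awaitables.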
+ if "search" not in self._stubs: + self._stubs["search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SearchService/Search", + request_serializer=search_service.SearchRequest.serialize, + response_deserializer=search_service.SearchResponse.deserialize, + ) + return self._stubs["search"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SearchServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py new file mode 100644 index 000000000000..81a604986bcf --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py @@ -0,0 +1,572 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.discoveryengine_v1beta.types import search_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SearchServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SearchServiceRestInterceptor: + """Interceptor for SearchService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SearchServiceRestTransport. + + .. code-block:: python + class MyCustomSearchServiceInterceptor(SearchServiceRestInterceptor): + def pre_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SearchServiceRestTransport(interceptor=MyCustomSearchServiceInterceptor()) + client = SearchServiceClient(transport=transport) + + + """ + + def pre_search( + self, request: search_service.SearchRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[search_service.SearchRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. + """ + return request, metadata + + def post_search( + self, response: search_service.SearchResponse + ) -> search_service.SearchResponse: + """Post-rpc interceptor for search + + Override in a subclass to manipulate the response + after it is returned by the SearchService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SearchService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SearchServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SearchServiceRestInterceptor + + +class SearchServiceRestTransport(SearchServiceTransport): + """REST backend transport for SearchService. + + Service for search. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SearchServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+            url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SearchServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Search(SearchServiceRestStub): + def __hash__(self): + return hash("Search") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_service.SearchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_service.SearchResponse: + r"""Call the search method over HTTP. + + Args: + request (~.search_service.SearchRequest): + The request object. Request message for + [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.search_service.SearchResponse: + Response message for + [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search] + method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{serving_config=projects/*/locations/*/dataStores/*/servingConfigs/*}:search", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{serving_config=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}:search", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_search(request, metadata) + pb_request = search_service.SearchRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = search_service.SearchResponse() + pb_resp = search_service.SearchResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search(resp) + return resp + + @property + def search( + self, + ) -> Callable[[search_service.SearchRequest], search_service.SearchResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Search(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SearchServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SearchServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SearchServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py index 1e295c45d63c..e9b0650e4dcd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/async_client.py @@ -278,9 +278,9 @@ async def sample_write_user_event(): Returns: google.cloud.discoveryengine_v1beta.types.UserEvent: UserEvent captures all metadata - information DiscoveryEngine API needs to - know about how end users interact with - customers' website. + information Discovery Engine API needs + to know about how end users interact + with customers' website. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py index 5c5a7174e49f..29be87c38613 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py @@ -531,9 +531,9 @@ def sample_write_user_event(): Returns: google.cloud.discoveryengine_v1beta.types.UserEvent: UserEvent captures all metadata - information DiscoveryEngine API needs to - know about how end users interact with - customers' website. + information Discovery Engine API needs + to know about how end users interact + with customers' website. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py index a040a7f56d29..b814f51339d6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import ( @@ -180,7 +180,7 @@ def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -189,7 +189,7 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.GetOperationRequest + self, response: operations_pb2.Operation ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -203,7 +203,7 @@ def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.ListOperationsResponse: + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -212,7 +212,7 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsRequest + self, response: operations_pb2.ListOperationsResponse ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -332,6 +332,26 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", @@ -354,6 +374,26 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", @@ -397,7 +437,7 @@ class _CollectUserEvent(UserEventServiceRestStub): def __hash__(self): return hash("CollectUserEvent") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "userEvent": "", } @@ -423,7 +463,6 @@ def __call__( request (~.user_event_service.CollectUserEventRequest): The request object. Request message for CollectUserEvent method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -489,6 +528,10 @@ def __call__( "method": "get", "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/userEvents:collect", }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:collect", + }, ] request, metadata = self._interceptor.pre_collect_user_event( request, metadata @@ -538,7 +581,7 @@ class _ImportUserEvents(UserEventServiceRestStub): def __hash__(self): return hash("ImportUserEvents") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -562,7 +605,6 @@ def __call__( request (~.import_config.ImportUserEventsRequest): The request object. Request message for the ImportUserEvents request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -583,6 +625,11 @@ def __call__( "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/userEvents:import", "body": "*", }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:import", + "body": "*", + }, ] request, metadata = self._interceptor.pre_import_user_events( request, metadata @@ -638,7 +685,7 @@ class _WriteUserEvent(UserEventServiceRestStub): def __hash__(self): return hash("WriteUserEvent") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -662,7 +709,6 @@ def __call__( request (~.user_event_service.WriteUserEventRequest): The request object. 
Request message for WriteUserEvent method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -672,9 +718,9 @@ def __call__( Returns: ~.user_event.UserEvent: UserEvent captures all metadata - information DiscoveryEngine API needs to - know about how end users interact with - customers' website. + information Discovery Engine API needs + to know about how end users interact + with customers' website. """ @@ -684,6 +730,11 @@ def __call__( "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/userEvents:write", "body": "user_event", }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:write", + "body": "user_event", + }, ] request, metadata = self._interceptor.pre_write_user_event( request, metadata @@ -791,6 +842,26 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", @@ -874,6 +945,26 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py index f605d812a34e..63efdaff2fbd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py @@ -13,7 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .common import CustomAttribute, UserInfo +from .common import CustomAttribute, Interval, UserInfo +from .completion_service import CompleteQueryRequest, CompleteQueryResponse from .document import Document from .document_service import ( CreateDocumentRequest, @@ -34,7 +35,25 @@ ImportUserEventsRequest, ImportUserEventsResponse, ) +from .purge_config import ( + PurgeDocumentsMetadata, + PurgeDocumentsRequest, + PurgeDocumentsResponse, +) from .recommendation_service import RecommendRequest, RecommendResponse +from .schema import Schema +from .schema_service import ( + CreateSchemaMetadata, + CreateSchemaRequest, + DeleteSchemaMetadata, + DeleteSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + UpdateSchemaMetadata, + UpdateSchemaRequest, +) +from .search_service import SearchRequest, SearchResponse from .user_event import ( CompletionInfo, DocumentInfo, @@ -49,7 +68,10 @@ __all__ = ( "CustomAttribute", + "Interval", "UserInfo", + "CompleteQueryRequest", + "CompleteQueryResponse", "Document", "CreateDocumentRequest", "DeleteDocumentRequest", @@ -66,8 +88,23 @@ "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", + "PurgeDocumentsMetadata", + "PurgeDocumentsRequest", + "PurgeDocumentsResponse", "RecommendRequest", "RecommendResponse", + "Schema", + "CreateSchemaMetadata", + "CreateSchemaRequest", + "DeleteSchemaMetadata", + "DeleteSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "UpdateSchemaMetadata", + "UpdateSchemaRequest", + "SearchRequest", + "SearchResponse", "CompletionInfo", "DocumentInfo", "MediaInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py index 17c7306391fb..15192abc39c0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -20,12 +22,64 @@ __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1beta", manifest={ + "Interval", "CustomAttribute", "UserInfo", }, ) +class Interval(proto.Message): + r"""A floating point interval. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + minimum (float): + Inclusive lower bound. + + This field is a member of `oneof`_ ``min``. + exclusive_minimum (float): + Exclusive lower bound. + + This field is a member of `oneof`_ ``min``. + maximum (float): + Inclusive upper bound. + + This field is a member of `oneof`_ ``max``. + exclusive_maximum (float): + Exclusive upper bound. + + This field is a member of `oneof`_ ``max``. 
+ """ + + minimum: float = proto.Field( + proto.DOUBLE, + number=1, + oneof="min", + ) + exclusive_minimum: float = proto.Field( + proto.DOUBLE, + number=2, + oneof="min", + ) + maximum: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="max", + ) + exclusive_maximum: float = proto.Field( + proto.DOUBLE, + number=4, + oneof="max", + ) + + class CustomAttribute(proto.Message): r"""A custom attribute that is not explicitly modeled in a resource, e.g. [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent]. @@ -35,24 +89,24 @@ class CustomAttribute(proto.Message): The textual values of this custom attribute. For example, ``["yellow", "green"]`` when the key is "color". - Empty string is not allowed. Otherwise, an INVALID_ARGUMENT - error is returned. + Empty string is not allowed. Otherwise, an + ``INVALID_ARGUMENT`` error is returned. Exactly one of - [text][google.cloud.discoveryengine.v1beta.CustomAttribute.text] + [CustomAttribute.text][google.cloud.discoveryengine.v1beta.CustomAttribute.text] or - [numbers][google.cloud.discoveryengine.v1beta.CustomAttribute.numbers] - should be set. Otherwise, an INVALID_ARGUMENT error is + [CustomAttribute.numbers][google.cloud.discoveryengine.v1beta.CustomAttribute.numbers] + should be set. Otherwise, an ``INVALID_ARGUMENT`` error is returned. numbers (MutableSequence[float]): The numerical values of this custom attribute. For example, ``[2.3, 15.4]`` when the key is "lengths_cm". Exactly one of - [text][google.cloud.discoveryengine.v1beta.CustomAttribute.text] + [CustomAttribute.text][google.cloud.discoveryengine.v1beta.CustomAttribute.text] or - [numbers][google.cloud.discoveryengine.v1beta.CustomAttribute.numbers] - should be set. Otherwise, an INVALID_ARGUMENT error is + [CustomAttribute.numbers][google.cloud.discoveryengine.v1beta.CustomAttribute.numbers] + should be set. Otherwise, an ``INVALID_ARGUMENT`` error is returned. """ @@ -82,20 +136,22 @@ class UserInfo(proto.Message): which results in degraded model quality. The field must be a UTF-8 encoded string with a length limit - of 128 characters. Otherwise, an INVALID_ARGUMENT error is - returned. + of 128 characters. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. user_agent (str): User agent as included in the HTTP header. Required for getting [SearchResponse.sponsored_results][]. The field must be a UTF-8 encoded string with a length limit - of 1,000 characters. Otherwise, an INVALID_ARGUMENT error is - returned. + of 1,000 characters. Otherwise, an ``INVALID_ARGUMENT`` + error is returned. This should not be set when using the client side event reporting with GTM or JavaScript tag in [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1beta.UserEventService.CollectUserEvent] - or if [direct_user_request][] is set. + or if + [UserEvent.direct_user_request][google.cloud.discoveryengine.v1beta.UserEvent.direct_user_request] + is set. """ user_id: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion_service.py new file mode 100644 index 000000000000..45c6cc2429a9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion_service.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "CompleteQueryRequest", + "CompleteQueryResponse", + }, +) + + +class CompleteQueryRequest(proto.Message): + r"""Request message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery] + method. + + Attributes: + data_store (str): + Required. The parent data store resource name for which the + completion is performed, such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + query (str): + Required. The typeahead input used to fetch + suggestions. Maximum length is 128 characters. + query_model (str): + Selects data model of query suggestions for serving. + Currently supported values: + + - ``document`` - Using suggestions generated from + user-imported documents. + - ``search-history`` - Using suggestions generated from the + past history of + [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search] + API calls. Do not use it when there is no traffic for + Search API. + - ``user-event`` - Using suggestions generated from + user-imported search events. + + Default values: + + - ``document`` is the default model for regular dataStores. + - ``search-history`` is the default model for + [IndustryVertical.SITE_SEARCH][google.cloud.discoveryengine.v1beta.IndustryVertical.SITE_SEARCH] + dataStores. + user_pseudo_id (str): + A unique identifier for tracking visitors. For example, this + could be implemented with an HTTP cookie, which should be + able to uniquely identify a visitor on a single device. This + unique identifier should not change if the visitor logs in + or out of the website. + + This field should NOT have a fixed value such as + ``unknown_visitor``. + + This should be the same identifier as + [UserEvent.user_pseudo_id][google.cloud.discoveryengine.v1beta.UserEvent.user_pseudo_id] + and + [SearchRequest.user_pseudo_id][google.cloud.discoveryengine.v1beta.SearchRequest.user_pseudo_id]. + + The field must be a UTF-8 encoded string with a length limit + of 128 characters. Otherwise, an ``INVALID_ARGUMENT`` error + is returned. + """ + + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + query_model: str = proto.Field( + proto.STRING, + number=3, + ) + user_pseudo_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CompleteQueryResponse(proto.Message): + r"""Response message for + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery] + method. + + Attributes: + query_suggestions (MutableSequence[google.cloud.discoveryengine_v1beta.types.CompleteQueryResponse.QuerySuggestion]): + Results of the matched query suggestions. The + result list is ordered and the first result is a + top suggestion. + """ + + class QuerySuggestion(proto.Message): + r"""Suggestions as search queries. 
+ + Attributes: + suggestion (str): + The suggestion for the query. + """ + + suggestion: str = proto.Field( + proto.STRING, + number=1, + ) + + query_suggestions: MutableSequence[QuerySuggestion] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=QuerySuggestion, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py index a35becf87458..b15f303c2723 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import struct_pb2 # type: ignore @@ -40,19 +42,21 @@ class Document(proto.Message): Attributes: struct_data (google.protobuf.struct_pb2.Struct): The structured JSON data for the document. It should conform - to the registered [schema][] or an INVALID_ARGUMENT error is - thrown. + to the registered + [Schema.schema][google.cloud.discoveryengine.v1beta.Schema.schema] + or an ``INVALID_ARGUMENT`` error is thrown. This field is a member of `oneof`_ ``data``. json_data (str): The JSON string representation of the document. It should - conform to the registered [schema][] or an INVALID_ARGUMENT - error is thrown. + conform to the registered + [Schema.schema][google.cloud.discoveryengine.v1beta.Schema.schema] + or an ``INVALID_ARGUMENT`` error is thrown. This field is a member of `oneof`_ ``data``. name (str): Immutable. The full resource name of the document. Format: - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document_id}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document_id}``. This field must be a UTF-8 encoded string with a length limit of 1024 characters. @@ -63,8 +67,12 @@ class Document(proto.Message): `RFC-1034 `__ standard with a length limit of 63 characters. schema_id (str): - Required. The identifier of the schema - located in the same data store. + The identifier of the schema located in the + same data store. + content (google.cloud.discoveryengine_v1beta.types.Document.Content): + The unstructured data linked to this document. Content must + be set if this document is under a ``CONTENT_REQUIRED`` data + store. parent_document_id (str): The identifier of the parent document. Currently supports at most two level document hierarchy. @@ -72,8 +80,65 @@ class Document(proto.Message): Id should conform to `RFC-1034 `__ standard with a length limit of 63 characters. + derived_struct_data (google.protobuf.struct_pb2.Struct): + Output only. This field is OUTPUT_ONLY. It contains derived + data that are not in the original input document. """ + class Content(proto.Message): + r"""Unstructured data linked to this document. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw_bytes (bytes): + The content represented as a stream of bytes. The maximum + length is 1,000,000 bytes (1 MB / ~0.95 MiB). + + Note: As with all ``bytes`` fields, this field is + represented as pure binary in Protocol Buffers and + base64-encoded string in JSON. For example, + ``abc123!?$*&()'-=@~`` should be represented as + ``YWJjMTIzIT8kKiYoKSctPUB+`` in JSON. See + https://developers.google.com/protocol-buffers/docs/proto3#json. + + This field is a member of `oneof`_ ``content``. + uri (str): + The URI of the content. Only Cloud Storage URIs (e.g. + ``gs://bucket-name/path/to/file``) are supported. The + maximum file size is 100 MB. + + This field is a member of `oneof`_ ``content``. + mime_type (str): + The MIME type of the content. Supported types: + + - ``application/pdf`` (PDF) + - ``text/html`` (HTML) + + See + https://www.iana.org/assignments/media-types/media-types.xhtml. + """ + + raw_bytes: bytes = proto.Field( + proto.BYTES, + number=2, + oneof="content", + ) + uri: str = proto.Field( + proto.STRING, + number=3, + oneof="content", + ) + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -97,10 +162,20 @@ class Document(proto.Message): proto.STRING, number=3, ) + content: Content = proto.Field( + proto.MESSAGE, + number=10, + message=Content, + ) parent_document_id: str = proto.Field( proto.STRING, number=7, ) + derived_struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py index a88e61fd85ac..ca02c24ab8cc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -42,16 +44,16 @@ class GetDocumentRequest(proto.Message): Required. Full resource name of [Document][google.cloud.discoveryengine.v1beta.Document], such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. If the caller does not have permission to access the [Document][google.cloud.discoveryengine.v1beta.Document], - regardless of whether or not it exists, a PERMISSION_DENIED - error is returned. + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. If the requested [Document][google.cloud.discoveryengine.v1beta.Document] - does not exist, a NOT_FOUND error is returned. + does not exist, a ``NOT_FOUND`` error is returned. """ name: str = proto.Field( @@ -68,13 +70,13 @@ class ListDocumentsRequest(proto.Message): Attributes: parent (str): Required. The parent branch resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}``. 
+ ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. Use ``default_branch`` as the branch ID, to list documents under the default branch. If the caller does not have permission to list [Documents][]s under this branch, regardless of whether or - not this branch exists, a PERMISSION_DENIED error is + not this branch exists, a ``PERMISSION_DENIED`` error is returned. page_size (int): Maximum number of @@ -82,7 +84,7 @@ class ListDocumentsRequest(proto.Message): return. If unspecified, defaults to 100. The maximum allowed value is 1000. Values above 1000 will be coerced to 1000. - If this field is negative, an INVALID_ARGUMENT error is + If this field is negative, an ``INVALID_ARGUMENT`` error is returned. page_token (str): A page token @@ -94,7 +96,7 @@ class ListDocumentsRequest(proto.Message): When paginating, all other parameters provided to [DocumentService.ListDocuments][google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments] must match the call that provided the page token. Otherwise, - an INVALID_ARGUMENT error is returned. + an ``INVALID_ARGUMENT`` error is returned. """ parent: str = proto.Field( @@ -150,7 +152,7 @@ class CreateDocumentRequest(proto.Message): Attributes: parent (str): Required. The parent resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. document (google.cloud.discoveryengine_v1beta.types.Document): Required. The [Document][google.cloud.discoveryengine.v1beta.Document] to @@ -163,19 +165,19 @@ class CreateDocumentRequest(proto.Message): If the caller does not have permission to create the [Document][google.cloud.discoveryengine.v1beta.Document], - regardless of whether or not it exists, a PERMISSION_DENIED - error is returned. + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. This field must be unique among all [Document][google.cloud.discoveryengine.v1beta.Document]s with the same [parent][google.cloud.discoveryengine.v1beta.CreateDocumentRequest.parent]. - Otherwise, an ALREADY_EXISTS error is returned. + Otherwise, an ``ALREADY_EXISTS`` error is returned. This field must conform to `RFC-1034 `__ standard with a length limit of 63 characters. Otherwise, an - INVALID_ARGUMENT error is returned. + ``INVALID_ARGUMENT`` error is returned. """ parent: str = proto.Field( @@ -204,14 +206,14 @@ class UpdateDocumentRequest(proto.Message): If the caller does not have permission to update the [Document][google.cloud.discoveryengine.v1beta.Document], - regardless of whether or not it exists, a PERMISSION_DENIED - error is returned. + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. If the [Document][google.cloud.discoveryengine.v1beta.Document] to update does not exist and [allow_missing][google.cloud.discoveryengine.v1beta.UpdateDocumentRequest.allow_missing] - is not set, a NOT_FOUND error is returned. + is not set, a ``NOT_FOUND`` error is returned. allow_missing (bool): If set to true, and the [Document][google.cloud.discoveryengine.v1beta.Document] is @@ -241,16 +243,16 @@ class DeleteDocumentRequest(proto.Message): Required. Full resource name of [Document][google.cloud.discoveryengine.v1beta.Document], such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}``. 
+ ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}``. If the caller does not have permission to delete the [Document][google.cloud.discoveryengine.v1beta.Document], - regardless of whether or not it exists, a PERMISSION_DENIED - error is returned. + regardless of whether or not it exists, a + ``PERMISSION_DENIED`` error is returned. If the [Document][google.cloud.discoveryengine.v1beta.Document] to - delete does not exist, a NOT_FOUND error is returned. + delete does not exist, a ``NOT_FOUND`` error is returned. """ name: str = proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py index dceb363560f8..ba1df3bada3b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore @@ -39,31 +41,43 @@ class GcsSource(proto.Message): - r"""Google Cloud Storage location for input content. - format. + r"""Cloud Storage location for input content. Attributes: input_uris (MutableSequence[str]): - Required. Google Cloud Storage URIs to input files. URI can - be up to 2000 characters long. URIs can match the full - object path (for example, - ``gs://bucket/directory/object.json``) or a pattern matching - one or more files, such as ``gs://bucket/directory/*.json``. - A request can contain at most 100 files, and each file can - be up to 2 GB. + Required. Cloud Storage URIs to input files. URI can be up + to 2000 characters long. URIs can match the full object path + (for example, ``gs://bucket/directory/object.json``) or a + pattern matching one or more files, such as + ``gs://bucket/directory/*.json``. + + A request can contain at most 100 files (or 100,000 files if + ``data_schema`` is ``content``). Each file can be up to 2 GB + (or 100 MB if ``data_schema`` is ``content``). data_schema (str): The schema to use when parsing the data from the source. - Supported values for imports: - - - ``user_event`` (default): One JSON - [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] - per line. + Supported values for document imports: - ``document`` (default): One JSON [Document][google.cloud.discoveryengine.v1beta.Document] per line. Each document must have a valid [Document.id][google.cloud.discoveryengine.v1beta.Document.id]. + - ``content``: Unstructured data (e.g. PDF, HTML). Each + file matched by ``input_uris`` will become a document, + with the ID set to the first 128 bits of SHA256(URI) + encoded as a hex string. + - ``custom``: One custom data JSON per row in arbitrary + format that conforms to the defined + [Schema][google.cloud.discoveryengine.v1beta.Schema] of + the data store. This can only be used by the GENERIC Data + Store vertical. + + Supported values for user event imports: + + - ``user_event`` (default): One JSON + [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] + per line. 
""" input_uris: MutableSequence[str] = proto.RepeatedField( @@ -108,16 +122,27 @@ class BigQuerySource(proto.Message): data_schema (str): The schema to use when parsing the data from the source. - Supported values for imports: + Supported values for user event imports: - - ``user_event`` (default): One JSON + - ``user_event`` (default): One [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] - per line. + per row. - - ``document`` (default): One JSON + Supported values for document imports: + + - ``document`` (default): One [Document][google.cloud.discoveryengine.v1beta.Document] - per line. Each document must have a valid - [document.id][]. + format per row. Each document must have a valid + [Document.id][google.cloud.discoveryengine.v1beta.Document.id] + and one of + [Document.json_data][google.cloud.discoveryengine.v1beta.Document.json_data] + or + [Document.struct_data][google.cloud.discoveryengine.v1beta.Document.struct_data]. + - ``custom``: One custom data per row in arbitrary format + that conforms the defined + [Schema][google.cloud.discoveryengine.v1beta.Schema] of + the data store. This can only be used by the GENERIC Data + Store vertical. """ partition_date: date_pb2.Date = proto.Field( @@ -155,10 +180,10 @@ class ImportErrorConfig(proto.Message): Attributes: gcs_prefix (str): - Google Cloud Storage prefix for import errors. This must be - an empty, existing Cloud Storage directory. Import errors - will be written to sharded files in this directory, one per - line, as a JSON-encoded ``google.rpc.Status`` message. + Cloud Storage prefix for import errors. This must be an + empty, existing Cloud Storage directory. Import errors will + be written to sharded files in this directory, one per line, + as a JSON-encoded ``google.rpc.Status`` message. This field is a member of `oneof`_ ``destination``. """ @@ -187,8 +212,8 @@ class ImportUserEventsRequest(proto.Message): This field is a member of `oneof`_ ``source``. gcs_source (google.cloud.discoveryengine_v1beta.types.GcsSource): - Required. Google Cloud Storage location for - the input content. + Required. Cloud Storage location for the + input content. This field is a member of `oneof`_ ``source``. bigquery_source (google.cloud.discoveryengine_v1beta.types.BigQuerySource): @@ -197,7 +222,7 @@ class ImportUserEventsRequest(proto.Message): This field is a member of `oneof`_ ``source``. parent (str): Required. Parent DataStore resource name, of the form - ``projects/{project}/locations/{location}/dataStores/{data_store}`` + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`` error_config (google.cloud.discoveryengine_v1beta.types.ImportErrorConfig): The desired location of errors incurred during the Import. Cannot be set for inline user @@ -386,8 +411,7 @@ class ImportDocumentsRequest(proto.Message): This field is a member of `oneof`_ ``source``. gcs_source (google.cloud.discoveryengine_v1beta.types.GcsSource): - Google Cloud Storage location for the input - content. + Cloud Storage location for the input content. This field is a member of `oneof`_ ``source``. bigquery_source (google.cloud.discoveryengine_v1beta.types.BigQuerySource): @@ -396,7 +420,7 @@ class ImportDocumentsRequest(proto.Message): This field is a member of `oneof`_ ``source``. parent (str): Required. The parent branch resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}``. 
+ ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. Requires create/update permission. error_config (google.cloud.discoveryengine_v1beta.types.ImportErrorConfig): The desired location of errors incurred @@ -405,6 +429,69 @@ The mode of reconciliation between existing documents and the documents to be imported. Defaults to [ReconciliationMode.INCREMENTAL][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL]. + auto_generate_ids (bool): + Whether to automatically generate IDs for the documents if + absent. + + If set to ``true``, + [Document.id][google.cloud.discoveryengine.v1beta.Document.id]s + are automatically generated based on the hash of the + payload, where IDs may not be consistent during multiple + imports. In that case, + [ReconciliationMode.FULL][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.ReconciliationMode.FULL] + is highly recommended to avoid duplicate contents. If unset + or set to ``false``, + [Document.id][google.cloud.discoveryengine.v1beta.Document.id]s + have to be specified using + [id_field][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.id_field], + otherwise, documents without IDs will fail to be imported. + + Only set this field when using + [GcsSource][google.cloud.discoveryengine.v1beta.GcsSource] + or + [BigQuerySource][google.cloud.discoveryengine.v1beta.BigQuerySource], + and when + [GcsSource.data_schema][google.cloud.discoveryengine.v1beta.GcsSource.data_schema] + or + [BigQuerySource.data_schema][google.cloud.discoveryengine.v1beta.BigQuerySource.data_schema] + is ``custom``. Otherwise, an INVALID_ARGUMENT error is + thrown. + id_field (str): + The field in the Cloud Storage and BigQuery sources that + indicates the unique IDs of the documents. + + For + [GcsSource][google.cloud.discoveryengine.v1beta.GcsSource] + it is the key of the JSON field. For instance, ``my_id`` for + JSON ``{"my_id": "some_uuid"}``. For + [BigQuerySource][google.cloud.discoveryengine.v1beta.BigQuerySource] + it is the column name of the BigQuery table where the unique + ids are stored. + + The values of the JSON field or the BigQuery column will be + used as the + [Document.id][google.cloud.discoveryengine.v1beta.Document.id]s. + The JSON field or the BigQuery column must be of string + type, and the values must be set as valid strings conforming + to `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__ with 1-63 + characters. Otherwise, documents without valid IDs will fail + to be imported. + + Only set this field when using + [GcsSource][google.cloud.discoveryengine.v1beta.GcsSource] + or + [BigQuerySource][google.cloud.discoveryengine.v1beta.BigQuerySource], + and when + [GcsSource.data_schema][google.cloud.discoveryengine.v1beta.GcsSource.data_schema] + or + [BigQuerySource.data_schema][google.cloud.discoveryengine.v1beta.BigQuerySource.data_schema] + is ``custom``. And only set this field when + [auto_generate_ids][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.auto_generate_ids] + is unset or set as ``false``. Otherwise, an INVALID_ARGUMENT + error is thrown. + + If it is unset, a default value ``_id`` is used when + importing from the allowed data sources. 
""" class ReconciliationMode(proto.Enum): @@ -477,6 +564,14 @@ class InlineSource(proto.Message): number=6, enum=ReconciliationMode, ) + auto_generate_ids: bool = proto.Field( + proto.BOOL, + number=8, + ) + id_field: str = proto.Field( + proto.STRING, + number=9, + ) class ImportDocumentsResponse(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py new file mode 100644 index 000000000000..b3c00d3fbafd --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "PurgeDocumentsRequest", + "PurgeDocumentsResponse", + "PurgeDocumentsMetadata", + }, +) + + +class PurgeDocumentsRequest(proto.Message): + r"""Request message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. + + Attributes: + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + filter (str): + Required. Filter matching documents to purge. Only currently + supported value is ``*`` (all items). + force (bool): + Actually performs the purge. If ``force`` is set to false, + return the expected purge count without deleting any + documents. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class PurgeDocumentsResponse(proto.Message): + r"""Response message for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. If the long running operation is successfully done, then + this message is returned by the + google.longrunning.Operations.response field. + + Attributes: + purge_count (int): + The total count of documents purged as a + result of the operation. + purge_sample (MutableSequence[str]): + A sample of document names that will be deleted. Only + populated if ``force`` is set to false. A max of 100 names + will be returned and the names are chosen at random. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + purge_sample: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class PurgeDocumentsMetadata(proto.Message): + r"""Metadata related to the progress of the PurgeDocuments + operation. This will be returned by the + google.longrunning.Operation.metadata field. 
+ + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were deleted + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py index 464f3c2e198a..7491a3ae9af2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import struct_pb2 # type: ignore @@ -36,7 +38,7 @@ class RecommendRequest(proto.Message): Attributes: serving_config (str): Required. Full resource name of the format: - projects/\ */locations/global/dataStores/*/servingConfigs/\* + ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` Before you can request recommendations from your model, you must create at least one serving config for it. @@ -72,10 +74,8 @@ class RecommendRequest(proto.Message): Examples: - - (filter_tags: ANY("Red", "Blue") OR filter_tags: - ANY("Hot", "Cold")) - - (filter_tags: ANY("Red", "Blue")) AND NOT (filter_tags: - ANY("Green")) + - ``(filter_tags: ANY("Red", "Blue") OR filter_tags: ANY("Hot", "Cold"))`` + - ``(filter_tags: ANY("Red", "Blue")) AND NOT (filter_tags: ANY("Green"))`` If your filter blocks all results, the API will return generic (unfiltered) popular Documents. If you only want @@ -84,9 +84,10 @@ class RecommendRequest(proto.Message): [RecommendRequest.params][google.cloud.discoveryengine.v1beta.RecommendRequest.params] to receive empty results instead. - Note that the API will never return Documents with - storageStatus of "EXPIRED" or "DELETED" regardless of filter - choices. + Note that the API will never return + [Document][google.cloud.discoveryengine.v1beta.Document]s + with ``storageStatus`` of ``EXPIRED`` or ``DELETED`` + regardless of filter choices. validate_only (bool): Use validate only mode for this recommendation query. If set to true, a fake @@ -102,11 +103,11 @@ class RecommendRequest(proto.Message): - ``returnDocument``: Boolean. If set to true, the associated Document object will be returned in - [RecommendResponse.results.document][RecommendationResult.document]. + [RecommendResponse.RecommendationResult.document][google.cloud.discoveryengine.v1beta.RecommendResponse.RecommendationResult.document]. - ``returnScore``: Boolean. 
If set to true, the recommendation 'score' corresponding to each returned Document will be set in - [RecommendResponse.results.metadata][RecommendationResult.metadata]. + [RecommendResponse.RecommendationResult.metadata][google.cloud.discoveryengine.v1beta.RecommendResponse.RecommendationResult.metadata]. The given 'score' indicates the probability of a Document conversion given the user's context and history. - ``strictFiltering``: Boolean. True by default. If set to @@ -116,12 +117,12 @@ class RecommendRequest(proto.Message): - ``diversityLevel``: String. Default empty. If set to be non-empty, then it needs to be one of: - - 'no-diversity' - - 'low-diversity' - - 'medium-diversity' - - 'high-diversity' - - 'auto-diversity' This gives request-level control and - adjusts recommendation results based on Document + - ``no-diversity`` + - ``low-diversity`` + - ``medium-diversity`` + - ``high-diversity`` + - ``auto-diversity`` This gives request-level control + and adjusts recommendation results based on Document category. user_labels (MutableMapping[str, str]): The user labels applied to a resource must meet the @@ -142,8 +143,8 @@ class RecommendRequest(proto.Message): - Keys must start with a lowercase letter or international character. - See `Google Cloud - Document `__ + See `Requirements for + labels `__ for more details. """ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema.py new file mode 100644 index 000000000000..e3a3faba92de --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "Schema", + }, +) + + +class Schema(proto.Message): + r"""Defines the structure and layout of a type of document data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + struct_schema (google.protobuf.struct_pb2.Struct): + The structured representation of the schema. + + This field is a member of `oneof`_ ``schema``. + json_schema (str): + The JSON representation of the schema. + + This field is a member of `oneof`_ ``schema``. + name (str): + Immutable. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. 
+ + This field must be a UTF-8 encoded string with a length + limit of 1024 characters. + """ + + struct_schema: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + oneof="schema", + message=struct_pb2.Struct, + ) + json_schema: str = proto.Field( + proto.STRING, + number=3, + oneof="schema", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema_service.py new file mode 100644 index 000000000000..437a394418d9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/schema_service.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import schema as gcd_schema + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "CreateSchemaRequest", + "UpdateSchemaRequest", + "DeleteSchemaRequest", + "CreateSchemaMetadata", + "UpdateSchemaMetadata", + "DeleteSchemaMetadata", + }, +) + + +class GetSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.GetSchema][google.cloud.discoveryengine.v1beta.SchemaService.GetSchema] + method. + + Attributes: + name (str): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSchemasRequest(proto.Message): + r"""Request message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. + + Attributes: + parent (str): + Required. The parent data store resource name, in the format + of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + page_size (int): + The maximum number of + [Schema][google.cloud.discoveryengine.v1beta.Schema]s to + return. The service may return fewer than this value. + + If unspecified, at most 100 + [Schema][google.cloud.discoveryengine.v1beta.Schema]s will + be returned. + + The maximum value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + A page token, received from a previous + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + call. Provide this to retrieve the subsequent page. 
+ + When paginating, all other parameters provided to + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + must match the call that provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSchemasResponse(proto.Message): + r"""Response message for + [SchemaService.ListSchemas][google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas] + method. + + Attributes: + schemas (MutableSequence[google.cloud.discoveryengine_v1beta.types.Schema]): + The [Schema][google.cloud.discoveryengine.v1beta.Schema]s. + next_page_token (str): + A token that can be sent as + [ListSchemasRequest.page_token][google.cloud.discoveryengine.v1beta.ListSchemasRequest.page_token] + to retrieve the next page. If this field is omitted, there + are no subsequent pages. + """ + + @property + def raw_page(self): + return self + + schemas: MutableSequence[gcd_schema.Schema] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_schema.Schema, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.CreateSchema][google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema] + method. + + Attributes: + parent (str): + Required. The parent data store resource name, in the format + of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. + schema (google.cloud.discoveryengine_v1beta.types.Schema): + Required. The + [Schema][google.cloud.discoveryengine.v1beta.Schema] to + create. + schema_id (str): + Required. The ID to use for the + [Schema][google.cloud.discoveryengine.v1beta.Schema], which + will become the final component of the + [Schema.name][google.cloud.discoveryengine.v1beta.Schema.name]. + + This field should conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + schema: gcd_schema.Schema = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_schema.Schema, + ) + schema_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.UpdateSchema][google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema] + method. + + Attributes: + schema (google.cloud.discoveryengine_v1beta.types.Schema): + Required. The + [Schema][google.cloud.discoveryengine.v1beta.Schema] to + update. + allow_missing (bool): + If set to true, and the + [Schema][google.cloud.discoveryengine.v1beta.Schema] is not + found, a new + [Schema][google.cloud.discoveryengine.v1beta.Schema] will be + created. In this situation, ``update_mask`` is ignored. + """ + + schema: gcd_schema.Schema = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_schema.Schema, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteSchemaRequest(proto.Message): + r"""Request message for + [SchemaService.DeleteSchema][google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema] + method. + + Attributes: + name (str): + Required. The full resource name of the schema, in the + format of + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/schemas/{schema}``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSchemaMetadata(proto.Message): + r"""Metadata for Create Schema LRO. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateSchemaMetadata(proto.Message): + r"""Metadata for UpdateSchema LRO. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteSchemaMetadata(proto.Message): + r"""Metadata for DeleteSchema LRO. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py new file mode 100644 index 000000000000..192d395a7314 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py @@ -0,0 +1,909 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import document as gcd_document + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "SearchRequest", + "SearchResponse", + }, +) + + +class SearchRequest(proto.Message): + r"""Request message for + [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search] + method. + + Attributes: + serving_config (str): + Required. 
 The resource name of the Search serving config, such as
+            ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store/servingConfigs/default_serving_config``.
+            This field is used to identify the serving configuration
+            name and the set of models used to make the search.
+        branch (str):
+            The branch resource name, such as
+            ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store/branches/0``.
+
+            Use ``default_branch`` as the branch ID or leave this field
+            empty, to search documents under the default branch.
+        query (str):
+            Raw search query.
+        page_size (int):
+            Maximum number of
+            [Document][google.cloud.discoveryengine.v1beta.Document]s to
+            return. If unspecified, defaults to a reasonable value. The
+            maximum allowed value is 100. Values above 100 will be
+            coerced to 100.
+
+            If this field is negative, an ``INVALID_ARGUMENT`` error is
+            returned.
+        page_token (str):
+            A page token received from a previous
+            [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search]
+            call. Provide this to retrieve the subsequent page.
+
+            When paginating, all other parameters provided to
+            [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search]
+            must match the call that provided the page token. Otherwise,
+            an ``INVALID_ARGUMENT`` error is returned.
+        offset (int):
+            A 0-indexed integer that specifies the current offset (that
+            is, starting result location, amongst the
+            [Document][google.cloud.discoveryengine.v1beta.Document]s
+            deemed by the API as relevant) in search results. This field
+            is only considered if
+            [page_token][google.cloud.discoveryengine.v1beta.SearchRequest.page_token]
+            is unset.
+
+            If this field is negative, an ``INVALID_ARGUMENT`` error is
+            returned.
+        filter (str):
+            The filter syntax consists of an expression language for
+            constructing a predicate from one or more fields of the
+            documents being filtered. The filter expression is
+            case-sensitive.
+
+            If this field is unrecognizable, an ``INVALID_ARGUMENT``
+            error is returned.
+        order_by (str):
+            The order in which documents are returned. Documents can be
+            ordered by a field in a
+            [Document][google.cloud.discoveryengine.v1beta.Document]
+            object. Leave it unset if ordered by relevance. The OrderBy
+            expression is case-sensitive.
+
+            If this field is unrecognizable, an ``INVALID_ARGUMENT``
+            error is returned.
+        user_info (google.cloud.discoveryengine_v1beta.types.UserInfo):
+            Information about the end user. Highly recommended for
+            analytics. The user_agent string in UserInfo will be used to
+            deduce device_type for analytics.
+        facet_specs (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchRequest.FacetSpec]):
+            Facet specifications for faceted search. If empty, no facets
+            are returned.
+
+            A maximum of 100 values are allowed. Otherwise, an
+            ``INVALID_ARGUMENT`` error is returned.
+        boost_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.BoostSpec):
+            Boost specification to boost certain
+            documents.
+        params (MutableMapping[str, google.protobuf.struct_pb2.Value]):
+            Additional search parameters.
+
+            For public website search only, supported values are:
+
+            -  ``user_country_code``: string. Default empty. If set to
+               non-empty, results are restricted or boosted based on the
+               location provided.
+            -  ``search_type``: double. Default empty. Enables
+               non-webpage searching depending on the value. The only
+               valid non-default value is 1, which enables image
+               searching.
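+
+            As a minimal sketch, a request that enables image search
+            might set ``params`` like this (``serving_config`` is
+            assumed to hold a serving config resource name; the key and
+            value follow the supported values listed above)::
+
+                from google.protobuf import struct_pb2
+
+                request = discoveryengine_v1beta.SearchRequest(
+                    serving_config=serving_config,
+                    query="example query",
+                    # search_type=1 switches on image searching.
+                    params={"search_type": struct_pb2.Value(number_value=1)},
+                )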
+
+        query_expansion_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.QueryExpansionSpec):
+            The query expansion specification that
+            specifies the conditions under which query
+            expansion will occur.
+        spell_correction_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.SpellCorrectionSpec):
+            The spell correction specification that
+            specifies the mode under which spell correction
+            will take effect.
+        user_pseudo_id (str):
+            A unique identifier for tracking visitors. For example, this
+            could be implemented with an HTTP cookie, which should be
+            able to uniquely identify a visitor on a single device. This
+            unique identifier should not change if the visitor logs in
+            or out of the website.
+
+            This field should NOT have a fixed value such as
+            ``unknown_visitor``.
+
+            This should be the same identifier as
+            [UserEvent.user_pseudo_id][google.cloud.discoveryengine.v1beta.UserEvent.user_pseudo_id]
+            and
+            [CompleteQueryRequest.user_pseudo_id][google.cloud.discoveryengine.v1beta.CompleteQueryRequest.user_pseudo_id].
+
+            The field must be a UTF-8 encoded string with a length limit
+            of 128 characters. Otherwise, an ``INVALID_ARGUMENT`` error
+            is returned.
+        content_search_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec):
+            The content search spec that configures the
+            desired behavior of content search.
+        safe_search (bool):
+            Whether to turn on safe search. This is only supported for
+            [ContentConfig.PUBLIC_WEBSITE][].
+        user_labels (MutableMapping[str, str]):
+            The user labels applied to a resource must meet the
+            following requirements:
+
+            -  Each resource can have multiple labels, up to a maximum
+               of 64.
+            -  Each label must be a key-value pair.
+            -  Keys have a minimum length of 1 character and a maximum
+               length of 63 characters and cannot be empty. Values can
+               be empty and have a maximum length of 63 characters.
+            -  Keys and values can contain only lowercase letters,
+               numeric characters, underscores, and dashes. All
+               characters must use UTF-8 encoding, and international
+               characters are allowed.
+            -  The key portion of a label must be unique. However, you
+               can use the same key with multiple resources.
+            -  Keys must start with a lowercase letter or international
+               character.
+
+            See `Google Cloud
+            Document `__
+            for more details.
+    """
+
+    class FacetSpec(proto.Message):
+        r"""A facet specification to perform faceted search.
+
+        Attributes:
+            facet_key (google.cloud.discoveryengine_v1beta.types.SearchRequest.FacetSpec.FacetKey):
+                Required. The facet key specification.
+            limit (int):
+                Maximum number of facet values that should be returned for
+                this facet. If unspecified, defaults to 20. The maximum
+                allowed value is 300. Values above 300 will be coerced to
+                300.
+
+                If this field is negative, an ``INVALID_ARGUMENT`` error is
+                returned.
+            excluded_filter_keys (MutableSequence[str]):
+                List of keys to exclude when faceting.
+
+                By default,
+                [FacetKey.key][google.cloud.discoveryengine.v1beta.SearchRequest.FacetSpec.FacetKey.key]
+                is not excluded from the filter unless it is listed in this
+                field.
+
+                Listing a facet key in this field allows its values to
+                appear as facet results, even when they are filtered out of
+                search results. Using this field does not affect what search
+                results are returned.
+
+                For example, suppose there are 100 documents with the color
+                facet "Red" and 200 documents with the color facet "Blue".
 A query containing the filter "color:ANY("Red")" and having
+                "color" as
+                [FacetKey.key][google.cloud.discoveryengine.v1beta.SearchRequest.FacetSpec.FacetKey.key]
+                would by default return only "Red" documents in the search
+                results, and also return "Red" with count 100 as the only
+                color facet. Although there are also blue documents
+                available, "Blue" would not be shown as an available facet
+                value.
+
+                If "color" is listed in "excludedFilterKeys", then the query
+                returns the facet values "Red" with count 100 and "Blue"
+                with count 200, because the "color" key is now excluded from
+                the filter. Because this field doesn't affect search
+                results, the search results are still correctly filtered to
+                return only "Red" documents.
+
+                A maximum of 100 values are allowed. Otherwise, an
+                ``INVALID_ARGUMENT`` error is returned.
+            enable_dynamic_position (bool):
+                Enables dynamic position for this facet. If set to true, the
+                position of this facet among all facets in the response is
+                determined automatically. It will be ordered together with
+                dynamic facets if dynamic facets are enabled. If set to
+                false, the position of this facet in the response will be
+                the same as in the request, and it will be ranked before the
+                facets with dynamic position enabled and before all dynamic
+                facets.
+
+                For example, you may always want to have the rating facet
+                returned in the response, but it is not necessary to always
+                display it at the top. In that case, you can set
+                enable_dynamic_position to true so that the position of the
+                rating facet in the response is determined automatically.
+
+                As another example, assume you have the following facets in
+                the request:
+
+                -  "rating", enable_dynamic_position = true
+
+                -  "price", enable_dynamic_position = false
+
+                -  "brands", enable_dynamic_position = false
+
+                Suppose you also have dynamic facets enabled, which
+                generates a facet "gender". The final order of the facets in
+                the response can then be ("price", "brands", "rating",
+                "gender") or ("price", "brands", "gender", "rating"),
+                depending on how the API orders the "gender" and "rating"
+                facets. However, "price" and "brands" will always be ranked
+                at first and second position, since their
+                enable_dynamic_position is false.
+        """
+
+        class FacetKey(proto.Message):
+            r"""Specifies how a facet is computed.
+
+            Attributes:
+                key (str):
+                    Required. Supported textual and numerical facet keys in a
+                    [Document][google.cloud.discoveryengine.v1beta.Document]
+                    object, over which the facet values are computed. Facet key
+                    is case-sensitive.
+                intervals (MutableSequence[google.cloud.discoveryengine_v1beta.types.Interval]):
+                    Set only if values should be bucketized into
+                    intervals. Must be set for facets with numerical
+                    values. Must not be set for facets with text
+                    values. Maximum number of intervals is 30.
+                restricted_values (MutableSequence[str]):
+                    Only get facet values for the given restricted values. Only
+                    supported on textual fields. For example, suppose "category"
+                    has three values "Action > 2022", "Action > 2021" and
+                    "Sci-Fi > 2022". If "restricted_values" is set to "Action >
+                    2022", the "category" facet will only contain "Action >
+                    2022". Only supported on textual fields. Maximum is 10.
+                prefixes (MutableSequence[str]):
+                    Only get facet values that start with the
+                    given string prefix. For example, suppose
+                    "category" has three values "Action > 2022",
+                    "Action > 2021" and "Sci-Fi > 2022". If
+                    "prefixes" is set to "Action", the "category"
+                    facet will only contain "Action > 2022" and
+                    "Action > 2021". Only supported on textual
+                    fields. Maximum is 10.
+                contains (MutableSequence[str]):
+                    Only get facet values that contain the given
+                    strings. For example, suppose "category" has
+                    three values "Action > 2022", "Action > 2021"
+                    and "Sci-Fi > 2022". If "contains" is set to
+                    "2022", the "category" facet will only contain
+                    "Action > 2022" and "Sci-Fi > 2022". Only
+                    supported on textual fields. Maximum is 10.
+                case_insensitive (bool):
+                    True to make facet keys case insensitive when
+                    getting faceting values with prefixes or
+                    contains; false otherwise.
+                order_by (str):
+                    The order in which documents are returned.
+
+                    Allowed values are:
+
+                    -  "count desc", which means order by
+                       [SearchResponse.Facet.values.count][google.cloud.discoveryengine.v1beta.SearchResponse.Facet.FacetValue.count]
+                       descending.
+
+                    -  "value desc", which means order by
+                       [SearchResponse.Facet.values.value][google.cloud.discoveryengine.v1beta.SearchResponse.Facet.FacetValue.value]
+                       descending. Only applies to textual facets.
+
+                    If not set, textual values are sorted in `natural
+                    order `__;
+                    numerical intervals are sorted in the order given by
+                    [FacetSpec.FacetKey.intervals][google.cloud.discoveryengine.v1beta.SearchRequest.FacetSpec.FacetKey.intervals].
+            """
+
+            key: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            intervals: MutableSequence[common.Interval] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=2,
+                message=common.Interval,
+            )
+            restricted_values: MutableSequence[str] = proto.RepeatedField(
+                proto.STRING,
+                number=3,
+            )
+            prefixes: MutableSequence[str] = proto.RepeatedField(
+                proto.STRING,
+                number=4,
+            )
+            contains: MutableSequence[str] = proto.RepeatedField(
+                proto.STRING,
+                number=5,
+            )
+            case_insensitive: bool = proto.Field(
+                proto.BOOL,
+                number=6,
+            )
+            order_by: str = proto.Field(
+                proto.STRING,
+                number=7,
+            )
+
+        facet_key: "SearchRequest.FacetSpec.FacetKey" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message="SearchRequest.FacetSpec.FacetKey",
+        )
+        limit: int = proto.Field(
+            proto.INT32,
+            number=2,
+        )
+        excluded_filter_keys: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=3,
+        )
+        enable_dynamic_position: bool = proto.Field(
+            proto.BOOL,
+            number=4,
+        )
+
+    class BoostSpec(proto.Message):
+        r"""Boost specification to boost certain documents.
+
+        Attributes:
+            condition_boost_specs (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchRequest.BoostSpec.ConditionBoostSpec]):
+                Condition boost specifications. If a document
+                matches multiple conditions in the
+                specifications, boost scores from these
+                specifications are all applied and combined in a
+                non-linear way. Maximum number of specifications
+                is 20.
+        """
+
+        class ConditionBoostSpec(proto.Message):
+            r"""Boost applies to documents which match a condition.
+
+            Attributes:
+                condition (str):
+                    An expression which specifies a boost condition. The syntax
+                    and supported fields are the same as a filter expression.
+                    See
+                    [SearchRequest.filter][google.cloud.discoveryengine.v1beta.SearchRequest.filter]
+                    for detailed syntax and limitations.
+
+                    Examples:
+
+                    -  To boost documents with document ID "doc_1" or "doc_2",
+                       and color "Red" or "Blue":
+
+                       -  (id: ANY("doc_1", "doc_2")) AND (color:
+                          ANY("Red","Blue"))
+                boost (float):
+                    Strength of the condition boost, which should be in [-1, 1].
+                    Negative boost means demotion. Default is 0.0.
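+
+                    For illustration, a sketch that mildly demotes
+                    documents matching a condition (the field name and
+                    value in the condition are placeholders)::
+
+                        spec = discoveryengine_v1beta.SearchRequest.BoostSpec.ConditionBoostSpec(
+                            condition='color: ANY("Blue")',
+                            boost=-0.5,  # negative values demote
+                        )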
+
+                    Setting to 1.0 gives the document a big promotion. However,
+                    it does not necessarily mean that the boosted document will
+                    be the top result at all times, nor that other documents
+                    will be excluded. Results could still be shown even when
+                    none of them matches the condition. Results that are
+                    significantly more relevant to the search query can still
+                    trump your heavily favored but irrelevant documents.
+
+                    Setting to -1.0 gives the document a big demotion. However,
+                    results that are deeply relevant might still be shown. The
+                    document will have an uphill battle to get a fairly high
+                    ranking, but it is not blocked out completely.
+
+                    Setting to 0.0 means no boost is applied; the boosting
+                    condition is ignored.
+            """
+
+            condition: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            boost: float = proto.Field(
+                proto.FLOAT,
+                number=2,
+            )
+
+        condition_boost_specs: MutableSequence[
+            "SearchRequest.BoostSpec.ConditionBoostSpec"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message="SearchRequest.BoostSpec.ConditionBoostSpec",
+        )
+
+    class QueryExpansionSpec(proto.Message):
+        r"""Specification to determine under which conditions query
+        expansion should occur.
+
+        Attributes:
+            condition (google.cloud.discoveryengine_v1beta.types.SearchRequest.QueryExpansionSpec.Condition):
+                The condition under which query expansion should occur.
+                Defaults to
+                [Condition.DISABLED][google.cloud.discoveryengine.v1beta.SearchRequest.QueryExpansionSpec.Condition.DISABLED].
+        """
+
+        class Condition(proto.Enum):
+            r"""Enum describing under which condition query expansion should
+            occur.
+
+            Values:
+                CONDITION_UNSPECIFIED (0):
+                    Unspecified query expansion condition. In this case, server
+                    behavior defaults to
+                    [Condition.DISABLED][google.cloud.discoveryengine.v1beta.SearchRequest.QueryExpansionSpec.Condition.DISABLED].
+                DISABLED (1):
+                    Disabled query expansion. Only the exact search query is
+                    used, even if
+                    [SearchResponse.total_size][google.cloud.discoveryengine.v1beta.SearchResponse.total_size]
+                    is zero.
+                AUTO (2):
+                    Automatic query expansion built by the Search
+                    API.
+            """
+            CONDITION_UNSPECIFIED = 0
+            DISABLED = 1
+            AUTO = 2
+
+        condition: "SearchRequest.QueryExpansionSpec.Condition" = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum="SearchRequest.QueryExpansionSpec.Condition",
+        )
+
+    class SpellCorrectionSpec(proto.Message):
+        r"""The specification for query spell correction.
+
+        Attributes:
+            mode (google.cloud.discoveryengine_v1beta.types.SearchRequest.SpellCorrectionSpec.Mode):
+                The mode under which spell correction should take effect to
+                replace the original search query. Defaults to
+                [Mode.AUTO][google.cloud.discoveryengine.v1beta.SearchRequest.SpellCorrectionSpec.Mode.AUTO].
+        """
+
+        class Mode(proto.Enum):
+            r"""Enum describing under which mode spell correction should
+            occur.
+
+            Values:
+                MODE_UNSPECIFIED (0):
+                    Unspecified spell correction mode. In this case, server
+                    behavior defaults to
+                    [Mode.AUTO][google.cloud.discoveryengine.v1beta.SearchRequest.SpellCorrectionSpec.Mode.AUTO].
+                SUGGESTION_ONLY (1):
+                    The Search API will try to find a spell suggestion, if there
+                    is one, and put it in the
+                    [SearchResponse.corrected_query][google.cloud.discoveryengine.v1beta.SearchResponse.corrected_query].
+                    The spell suggestion will not be used as the search query.
+                AUTO (2):
+                    Automatic spell correction built by the
+                    Search API. Search will be based on the
+                    corrected query if found.
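+
+            For illustration, a sketch of a spec that only asks for a
+            suggestion without rewriting the query (the enum is
+            addressed through its containing message)::
+
+                spec = discoveryengine_v1beta.SearchRequest.SpellCorrectionSpec(
+                    mode=discoveryengine_v1beta.SearchRequest.SpellCorrectionSpec.Mode.SUGGESTION_ONLY,
+                )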
+            """
+            MODE_UNSPECIFIED = 0
+            SUGGESTION_ONLY = 1
+            AUTO = 2
+
+        mode: "SearchRequest.SpellCorrectionSpec.Mode" = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum="SearchRequest.SpellCorrectionSpec.Mode",
+        )
+
+    class ContentSearchSpec(proto.Message):
+        r"""The specification that configures the desired behavior of the
+        UCS content search.
+
+        Attributes:
+            snippet_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SnippetSpec):
+                If there is no snippet spec provided, there
+                will be no snippet in the search result.
+            summary_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SummarySpec):
+                If there is no summary spec provided, there
+                will be no summary in the search response.
+        """
+
+        class SnippetSpec(proto.Message):
+            r"""The specification that configures the snippet in the search
+            results.
+
+            Attributes:
+                max_snippet_count (int):
+                    Max number of snippets returned in each search result. If
+                    the number of matching snippets is less than
+                    max_snippet_count, all of the snippets are returned;
+                    otherwise, max_snippet_count snippets are returned.
+
+                    At most 5 snippets will be returned for each SearchResult.
+                reference_only (bool):
+                    If true, only the snippet reference is
+                    returned.
+            """
+
+            max_snippet_count: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            reference_only: bool = proto.Field(
+                proto.BOOL,
+                number=2,
+            )
+
+        class SummarySpec(proto.Message):
+            r"""The specification that configures the summary in the search
+            response.
+
+            Attributes:
+                summary_result_count (int):
+                    The number of top results the summary should be generated
+                    from. If the number of returned results is less than
+                    summary_result_count, the summary is derived from all of the
+                    results; otherwise, it is derived from the top results.
+
+                    At most 5 results can be used for generating a summary.
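+
+                    For example, a sketch that asks the service to
+                    summarize the top 3 results (an illustrative value
+                    within the documented maximum of 5)::
+
+                        summary_spec = discoveryengine_v1beta.SearchRequest.ContentSearchSpec.SummarySpec(
+                            summary_result_count=3,
+                        )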
+            """
+
+            summary_result_count: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+
+        snippet_spec: "SearchRequest.ContentSearchSpec.SnippetSpec" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message="SearchRequest.ContentSearchSpec.SnippetSpec",
+        )
+        summary_spec: "SearchRequest.ContentSearchSpec.SummarySpec" = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message="SearchRequest.ContentSearchSpec.SummarySpec",
+        )
+
+    serving_config: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    branch: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    query: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=4,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    offset: int = proto.Field(
+        proto.INT32,
+        number=6,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    user_info: common.UserInfo = proto.Field(
+        proto.MESSAGE,
+        number=21,
+        message=common.UserInfo,
+    )
+    facet_specs: MutableSequence[FacetSpec] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=9,
+        message=FacetSpec,
+    )
+    boost_spec: BoostSpec = proto.Field(
+        proto.MESSAGE,
+        number=10,
+        message=BoostSpec,
+    )
+    params: MutableMapping[str, struct_pb2.Value] = proto.MapField(
+        proto.STRING,
+        proto.MESSAGE,
+        number=11,
+        message=struct_pb2.Value,
+    )
+    query_expansion_spec: QueryExpansionSpec = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message=QueryExpansionSpec,
+    )
+    spell_correction_spec: SpellCorrectionSpec = proto.Field(
+        proto.MESSAGE,
+        number=14,
+        message=SpellCorrectionSpec,
+    )
+    user_pseudo_id: str = proto.Field(
+        proto.STRING,
+        number=15,
+    )
+    content_search_spec: ContentSearchSpec = proto.Field(
+        proto.MESSAGE,
+        number=24,
+        message=ContentSearchSpec,
+    )
+    safe_search: bool = proto.Field(
+        proto.BOOL,
+        number=20,
+    )
+    user_labels: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=22,
+    )
+
+
+class SearchResponse(proto.Message):
+    r"""Response message for
+    [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search]
+    method.
+
+    Attributes:
+        results (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.SearchResult]):
+            A list of matched documents. The order
+            represents the ranking.
+        facets (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.Facet]):
+            Results of facets requested by the user.
+        guided_search_result (google.cloud.discoveryengine_v1beta.types.SearchResponse.GuidedSearchResult):
+            Guided search result.
+        total_size (int):
+            The estimated total count of matched items irrespective of
+            pagination. The count of
+            [results][google.cloud.discoveryengine.v1beta.SearchResponse.results]
+            returned by pagination may be less than the
+            [total_size][google.cloud.discoveryengine.v1beta.SearchResponse.total_size]
+            that matches.
+        attribution_token (str):
+            A unique search token. This should be included in the
+            [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent]
+            logs resulting from this search, which enables accurate
+            attribution of search model performance.
+        next_page_token (str):
+            A token that can be sent as
+            [SearchRequest.page_token][google.cloud.discoveryengine.v1beta.SearchRequest.page_token]
+            to retrieve the next page. If this field is omitted, there
+            are no subsequent pages.
+        corrected_query (str):
+            Contains the spell corrected query, if found.
 If the spell
+            correction type is AUTOMATIC, then the search results are
+            based on corrected_query. Otherwise, the original query is
+            used for search.
+        summary (google.cloud.discoveryengine_v1beta.types.SearchResponse.Summary):
+            A summary as part of the search results. This field is only
+            returned if
+            [SearchRequest.ContentSearchSpec.summary_spec][google.cloud.discoveryengine.v1beta.SearchRequest.ContentSearchSpec.summary_spec]
+            is set.
+        applied_controls (MutableSequence[str]):
+            Controls applied as part of the Control
+            service.
+    """
+
+    class SearchResult(proto.Message):
+        r"""Represents the search results.
+
+        Attributes:
+            id (str):
+                [Document.id][google.cloud.discoveryengine.v1beta.Document.id]
+                of the searched
+                [Document][google.cloud.discoveryengine.v1beta.Document].
+            document (google.cloud.discoveryengine_v1beta.types.Document):
+                The document data snippet in the search
+                response. Only fields that are marked as
+                retrievable are populated.
+        """
+
+        id: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        document: gcd_document.Document = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=gcd_document.Document,
+        )
+
+    class Facet(proto.Message):
+        r"""A facet result.
+
+        Attributes:
+            key (str):
+                The key for this facet. E.g., "colors" or "price". It
+                matches
+                [SearchRequest.FacetSpec.FacetKey.key][google.cloud.discoveryengine.v1beta.SearchRequest.FacetSpec.FacetKey.key].
+            values (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.Facet.FacetValue]):
+                The facet values for this field.
+            dynamic_facet (bool):
+                Whether the facet is dynamically generated.
+        """
+
+        class FacetValue(proto.Message):
+            r"""A facet value which contains value names and their count.
+
+            This message has `oneof`_ fields (mutually exclusive fields).
+            For each oneof, at most one member field can be set at the same time.
+            Setting any member of the oneof automatically clears all other
+            members.
+
+            .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+            Attributes:
+                value (str):
+                    Text value of a facet, such as "Black" for
+                    facet "colors".
+
+                    This field is a member of `oneof`_ ``facet_value``.
+                interval (google.cloud.discoveryengine_v1beta.types.Interval):
+                    Interval value for a facet, such as [10, 20) for facet
+                    "price". It matches
+                    [SearchRequest.FacetSpec.FacetKey.intervals][google.cloud.discoveryengine.v1beta.SearchRequest.FacetSpec.FacetKey.intervals].
+
+                    This field is a member of `oneof`_ ``facet_value``.
+                count (int):
+                    Number of items that have this facet value.
+            """
+
+            value: str = proto.Field(
+                proto.STRING,
+                number=1,
+                oneof="facet_value",
+            )
+            interval: common.Interval = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                oneof="facet_value",
+                message=common.Interval,
+            )
+            count: int = proto.Field(
+                proto.INT64,
+                number=3,
+            )
+
+        key: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        values: MutableSequence[
+            "SearchResponse.Facet.FacetValue"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=2,
+            message="SearchResponse.Facet.FacetValue",
+        )
+        dynamic_facet: bool = proto.Field(
+            proto.BOOL,
+            number=3,
+        )
+
+    class GuidedSearchResult(proto.Message):
+        r"""Guided search result. Guided search helps users refine the
+        search results and narrow down to their real needs from a broad
+        set of search results.
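+
+        A rough sketch of reading the refinements off a response
+        (``response`` is assumed to be a ``SearchResponse``)::
+
+            result = response.guided_search_result
+            for attr in result.refinement_attributes:
+                print(attr.attribute_key, attr.attribute_value)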
+
+        Attributes:
+            refinement_attributes (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.GuidedSearchResult.RefinementAttribute]):
+                A list of ranked refinement attributes.
+        """
+
+        class RefinementAttribute(proto.Message):
+            r"""Useful attribute for search result refinements.
+
+            Attributes:
+                attribute_key (str):
+                    Attribute key used to refine the results,
+                    e.g. 'movie_type'.
+                attribute_value (str):
+                    Attribute value used to refine the results,
+                    e.g. 'drama'.
+            """
+
+            attribute_key: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            attribute_value: str = proto.Field(
+                proto.STRING,
+                number=2,
+            )
+
+        refinement_attributes: MutableSequence[
+            "SearchResponse.GuidedSearchResult.RefinementAttribute"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message="SearchResponse.GuidedSearchResult.RefinementAttribute",
+        )
+
+    class Summary(proto.Message):
+        r"""Summary of the top N search results specified by the summary
+        spec.
+
+        Attributes:
+            summary_text (str):
+                The summary content.
+        """
+
+        summary_text: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+
+    @property
+    def raw_page(self):
+        return self
+
+    results: MutableSequence[SearchResult] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=SearchResult,
+    )
+    facets: MutableSequence[Facet] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message=Facet,
+    )
+    guided_search_result: GuidedSearchResult = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=GuidedSearchResult,
+    )
+    total_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    attribution_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    corrected_query: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    summary: Summary = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=Summary,
+    )
+    applied_controls: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=10,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py
index dff9ece680f5..2c4779f170de 100644
--- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py
+++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from __future__ import annotations
+
 from typing import MutableMapping, MutableSequence
 
 from google.protobuf import duration_pb2  # type: ignore
@@ -37,7 +39,7 @@
 
 
 class UserEvent(proto.Message):
-    r"""UserEvent captures all metadata information DiscoveryEngine
+    r"""UserEvent captures all metadata information Discovery Engine
     API needs to know about how end users interact with customers'
     website.
 
@@ -111,7 +113,7 @@ class UserEvent(proto.Message):
             length limit of 128 bytes. A session is an aggregation of an
             end user behavior in a time span.
 
-            A general guideline to populate the sesion_id:
+            A general guideline to populate the session_id:
 
             1. If user has no activity for 30 min, a new session_id
                should be assigned.
@@ -128,18 +130,21 @@ class UserEvent(proto.Message):
             trigger the event. Highly recommended for user events that
             are the result of
-            [PredictionService.Predict][]. This field enables accurate
-            attribution of recommendation model performance.
+ [RecommendationService.Recommend][google.cloud.discoveryengine.v1beta.RecommendationService.Recommend]. + This field enables accurate attribution of recommendation + model performance. The value must be one of: - [PredictResponse.attribution_token][] for events that are - the result of [PredictionService.Predict][]. + the result of + [RecommendationService.Recommend][google.cloud.discoveryengine.v1beta.RecommendationService.Recommend]. - [SearchResponse.attribution_token][google.cloud.discoveryengine.v1beta.SearchResponse.attribution_token] for events that are the result of - [SearchService.Search][]. + [SearchService.Search][google.cloud.discoveryengine.v1beta.SearchService.Search]. - [CompleteQueryResponse.attribution_token][] for events - that are the result of [SearchService.CompleteQuery][]. + that are the result of + [CompletionService.CompleteQuery][google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery]. This token enables us to accurately attribute page view or conversion completion back to the event and the particular @@ -155,15 +160,16 @@ class UserEvent(proto.Message): documents being filtered. One example is for ``search`` events, the associated - [SearchService.SearchRequest][] may contain a filter - expression in [SearchService.SearchRequest.filter][] + [SearchRequest][google.cloud.discoveryengine.v1beta.SearchRequest] + may contain a filter expression in + [SearchRequest.filter][google.cloud.discoveryengine.v1beta.SearchRequest.filter] conforming to https://google.aip.dev/160#filtering. Similarly, for ``view-item-list`` events that are generated - from a [PredictionService.PredictRequest][], this field may - be populated directly from - [PredictionService.PredictRequest.filter][] conforming to - https://google.aip.dev/160#filtering. + from a [RecommendationService.RecommendRequest][], this + field may be populated directly from + [RecommendationService.RecommendRequest.filter][] conforming + to https://google.aip.dev/160#filtering. The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an INVALID_ARGUMENT error is @@ -223,12 +229,12 @@ class UserEvent(proto.Message): also include them in the user events that you associate with prediction requests. Custom attribute formatting must be consistent between imported events and events provided with - prediction requests. This lets the DiscoveryEngine API use + prediction requests. This lets the Discovery Engine API use those custom attributes when training models and serving predictions, which helps improve recommendation quality. This field needs to pass all below criteria, otherwise an - INVALID_ARGUMENT error is returned: + ``INVALID_ARGUMENT`` error is returned: - The key must be a UTF-8 encoded string with a length limit of 5,000 characters. @@ -238,10 +244,10 @@ class UserEvent(proto.Message): - For number attributes, at most 400 values are allowed. For product recommendations, an example of extra user - information is traffic_channel, which is how a user arrives - at the site. Users can arrive at the site by coming to the - site directly, coming through Google search, or in other - ways. + information is ``traffic_channel``, which is how a user + arrives at the site. Users can arrive at the site by coming + to the site directly, coming through Google search, or in + other ways. media_info (google.cloud.discoveryengine_v1beta.types.MediaInfo): Media-specific info. 
""" @@ -416,9 +422,11 @@ class SearchInfo(proto.Message): At least one of [search_query][google.cloud.discoveryengine.v1beta.SearchInfo.search_query] - or [page_categories][] is required for ``search`` events. - Other event types should not set this field. Otherwise, an - INVALID_ARGUMENT error is returned. + or + [PageInfo.page_category][google.cloud.discoveryengine.v1beta.PageInfo.page_category] + is required for ``search`` events. Other event types should + not set this field. Otherwise, an INVALID_ARGUMENT error is + returned. order_by (str): The order in which products are returned, if applicable. @@ -603,7 +611,7 @@ class DocumentInfo(proto.Message): This field is a member of `oneof`_ ``document_descriptor``. name (str): Required. The Document resource full name, of the form: - projects//locations//dataStores//branches//documents/ + ``projects/{project_id}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}/branches/{branch_id}/documents/{document_id}`` This field is a member of `oneof`_ ``document_descriptor``. quantity (int): @@ -708,7 +716,7 @@ class MediaInfo(proto.Message): Media progress should be computed using only the media_progress_duration relative to the media total length. - This value must be between [0, 1.0] inclusive. + This value must be between ``[0, 1.0]`` inclusive. If this is not a playback or the progress cannot be computed (e.g. ongoing livestream), this field should be unset. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py index c3cb92c39a65..227b3c21ae3e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -36,7 +38,7 @@ class WriteUserEventRequest(proto.Message): Attributes: parent (str): Required. The parent DataStore resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. user_event (google.cloud.discoveryengine_v1beta.types.UserEvent): Required. User event to write. @@ -63,7 +65,7 @@ class CollectUserEventRequest(proto.Message): Attributes: parent (str): Required. The parent DataStore resource name, such as - ``projects/{project}/locations/{location}/dataStores/{data_store}``. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}``. user_event (str): Required. URL encoded UserEvent proto with a length limit of 2,000,000 characters. diff --git a/packages/google-cloud-discoveryengine/noxfile.py b/packages/google-cloud-discoveryengine/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-cloud-discoveryengine/noxfile.py +++ b/packages/google-cloud-discoveryengine/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. 
- # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_complete_query_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_complete_query_async.py new file mode 100644 index 000000000000..14581dbe520d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_complete_query_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_CompletionService_CompleteQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_complete_query(): + # Create a client + client = discoveryengine_v1.CompletionServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CompleteQueryRequest( + data_store="data_store_value", + query="query_value", + ) + + # Make the request + response = await client.complete_query(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_CompletionService_CompleteQuery_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_complete_query_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_complete_query_sync.py new file mode 100644 index 000000000000..50d04b4e6670 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_complete_query_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_CompletionService_CompleteQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_complete_query(): + # Create a client + client = discoveryengine_v1.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CompleteQueryRequest( + data_store="data_store_value", + query="query_value", + ) + + # Make the request + response = client.complete_query(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_CompletionService_CompleteQuery_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_create_document_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_create_document_async.py new file mode 100644 index 000000000000..14d6ec31e3a0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_create_document_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_CreateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_create_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateDocumentRequest( + parent="parent_value", + document_id="document_id_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_CreateDocument_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_create_document_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_create_document_sync.py new file mode 100644 index 000000000000..a79ecac3edeb --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_create_document_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_CreateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_create_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateDocumentRequest( + parent="parent_value", + document_id="document_id_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_CreateDocument_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_delete_document_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_delete_document_async.py new file mode 100644 index 000000000000..35f8a95dbcff --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_delete_document_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_DeleteDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_delete_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + +# [END discoveryengine_v1_generated_DocumentService_DeleteDocument_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_delete_document_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_delete_document_sync.py new file mode 100644 index 000000000000..acfccf57dea9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_delete_document_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_DeleteDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_delete_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + +# [END discoveryengine_v1_generated_DocumentService_DeleteDocument_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_get_document_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_get_document_async.py new file mode 100644 index 000000000000..19d2f47c7346 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_get_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_GetDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_GetDocument_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_get_document_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_get_document_sync.py new file mode 100644 index 000000000000..cfa3d9116313 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_get_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_GetDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_GetDocument_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_import_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_import_documents_async.py new file mode 100644 index 000000000000..a4d14263a95a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_import_documents_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_ImportDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
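+# - This method returns a long-running operation. With the async client both
+# the call and the operation's result() are coroutines, so depending on the
+# google-api-core version fully resolving the response may look like:
+#     response = await (await operation).result()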
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_import_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ImportDocumentsRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_ImportDocuments_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_import_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_import_documents_sync.py new file mode 100644 index 000000000000..7efdfaa97635 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_import_documents_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_ImportDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
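+# - operation.result() blocks until the import finishes; it accepts an optional
+# timeout in seconds, for example operation.result(timeout=300), and raises
+# google.api_core.exceptions.GoogleAPICallError if the operation fails.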
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_import_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ImportDocumentsRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_ImportDocuments_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_list_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_list_documents_async.py new file mode 100644 index 000000000000..c89a5b43f31e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_list_documents_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_ListDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
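+# - With the async client the list call may itself need to be awaited before
+# iterating, depending on the library version, for example:
+#     page_result = await client.list_documents(request=request)
+#     async for response in page_result:
+#         print(response)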
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_DocumentService_ListDocuments_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_list_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_list_documents_sync.py new file mode 100644 index 000000000000..defa790a4f0f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_list_documents_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_ListDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_DocumentService_ListDocuments_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py new file mode 100644 index 000000000000..af16bc0ed860 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_PurgeDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
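+# - "filter_value" is a placeholder; the service may only accept specific
+# filter expressions for PurgeDocuments (for example "*" to match all
+# documents), so consult the service documentation before running a purge.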
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_purge_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_PurgeDocuments_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py new file mode 100644 index 000000000000..63767cb67e30 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_purge_documents(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_update_document_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_update_document_async.py new file mode 100644 index 000000000000..97232cfa9f42 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_update_document_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_UpdateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_update_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_UpdateDocument_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_update_document_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_update_document_sync.py new file mode 100644 index 000000000000..d7424c30f517 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_update_document_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_UpdateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
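+# - The request is constructed empty here; a real update would typically set
+# request.document to an existing document, at minimum its resource name, for
+# example (hypothetical value):
+#     request.document.name = "projects/my-project/locations/global/dataStores/my-store/branches/0/documents/my-doc"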
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_update_document(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_UpdateDocument_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_create_schema_async.py new file mode 100644 index 000000000000..af295a35edf5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_create_schema_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_CreateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_create_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Make the request + operation = client.create_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_CreateSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_create_schema_sync.py new file mode 100644 index 000000000000..214876fd77d9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_create_schema_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_CreateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
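+# - Besides parent and schema_id, a real request usually carries the schema
+# definition itself, which this API version may accept as a JSON string, for
+# example (hypothetical value):
+#     request.schema.json_schema = '{"type": "object"}'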
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_create_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Make the request + operation = client.create_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_CreateSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_delete_schema_async.py new file mode 100644 index 000000000000..22f0d4c27d13 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_delete_schema_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_DeleteSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_delete_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_DeleteSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_delete_schema_sync.py new file mode 100644 index 000000000000..17ca368a500f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_delete_schema_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_DeleteSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_delete_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_DeleteSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_get_schema_async.py new file mode 100644 index 000000000000..e410c25c884c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_get_schema_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_GetSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_GetSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_get_schema_sync.py new file mode 100644 index 000000000000..45c3a45a901f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_get_schema_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_GetSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_GetSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_list_schemas_async.py new file mode 100644 index 000000000000..2094cf3a44dc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_list_schemas_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_ListSchemas_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_schemas(): + # Create a client + client = discoveryengine_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SchemaService_ListSchemas_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_list_schemas_sync.py new file mode 100644 index 000000000000..a535905686ba --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_list_schemas_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_ListSchemas_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_schemas(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SchemaService_ListSchemas_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_update_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_update_schema_async.py new file mode 100644 index 000000000000..72203aebc936 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_update_schema_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_UpdateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_update_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateSchemaRequest( + ) + + # Make the request + operation = client.update_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_UpdateSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_update_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_update_schema_sync.py new file mode 100644 index 000000000000..2f81f181cba1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_schema_service_update_schema_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SchemaService_UpdateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_update_schema(): + # Create a client + client = discoveryengine_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.UpdateSchemaRequest( + ) + + # Make the request + operation = client.update_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SchemaService_UpdateSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_service_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_service_search_async.py new file mode 100644 index 000000000000..87c051ea2c3e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_service_search_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Search +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchService_Search_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
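+# - serving_config expects a full serving config resource name, and most real
+# searches also set a query, for example (hypothetical values):
+#     request.serving_config = "projects/my-project/locations/global/dataStores/my-store/servingConfigs/default_search"
+#     request.query = "example query"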
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_search(): + # Create a client + client = discoveryengine_v1.SearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.SearchRequest( + serving_config="serving_config_value", + ) + + # Make the request + page_result = client.search(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SearchService_Search_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_service_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_service_search_sync.py new file mode 100644 index 000000000000..19f06d573e71 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_service_search_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Search +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchService_Search_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_search(): + # Create a client + client = discoveryengine_v1.SearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.SearchRequest( + serving_config="serving_config_value", + ) + + # Make the request + page_result = client.search(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SearchService_Search_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_collect_user_event_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_collect_user_event_async.py new file mode 100644 index 000000000000..81299fba817f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_collect_user_event_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CollectUserEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_CollectUserEvent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
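+# - For CollectUserEvent, user_event is not a message but a UserEvent payload
+# serialized as a URL-encoded JSON string (this RPC is designed so it can be
+# issued as a GET from a beacon), so "user_event_value" stands for that string.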
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_collect_user_event(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CollectUserEventRequest( + parent="parent_value", + user_event="user_event_value", + ) + + # Make the request + response = await client.collect_user_event(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_CollectUserEvent_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_collect_user_event_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_collect_user_event_sync.py new file mode 100644 index 000000000000..f4722245ff11 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_collect_user_event_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CollectUserEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_CollectUserEvent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_collect_user_event(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.CollectUserEventRequest( + parent="parent_value", + user_event="user_event_value", + ) + + # Make the request + response = client.collect_user_event(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_CollectUserEvent_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_import_user_events_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_import_user_events_async.py new file mode 100644 index 000000000000..54d3945378dd --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_import_user_events_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_ImportUserEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
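+# - user_events is a repeated field, so the attribute-style initialization
+# shown below may need to be replaced with an explicit list of UserEvent
+# messages, for example (hypothetical values):
+#     inline_source.user_events = [
+#         discoveryengine_v1.UserEvent(
+#             event_type="search",
+#             user_pseudo_id="user-123",
+#         ),
+#     ]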
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_import_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.user_events.event_type = "event_type_value" + inline_source.user_events.user_pseudo_id = "user_pseudo_id_value" + + request = discoveryengine_v1.ImportUserEventsRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_ImportUserEvents_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_import_user_events_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_import_user_events_sync.py new file mode 100644 index 000000000000..026c946e9c6b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_import_user_events_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_ImportUserEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
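+# - In this sync variant, operation.result() blocks until the long-running
+#   operation completes. A timeout in seconds can be passed to bound the
+#   wait, for example:
+#       response = operation.result(timeout=300)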
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_import_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.user_events.event_type = "event_type_value" + inline_source.user_events.user_pseudo_id = "user_pseudo_id_value" + + request = discoveryengine_v1.ImportUserEventsRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_ImportUserEvents_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_write_user_event_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_write_user_event_async.py new file mode 100644 index 000000000000..cce9c8eba81a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_write_user_event_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteUserEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_WriteUserEvent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
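+# - sample_write_user_event() is a coroutine and must be driven by an
+#   event loop; one common way to run it (shown as an example, not the
+#   only way) is:
+#       import asyncio
+#       asyncio.run(sample_write_user_event())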
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_write_user_event(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.WriteUserEventRequest( + parent="parent_value", + ) + + # Make the request + response = await client.write_user_event(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_WriteUserEvent_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_write_user_event_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_write_user_event_sync.py new file mode 100644 index 000000000000..fa4fae2a3e49 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_write_user_event_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteUserEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_WriteUserEvent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
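+# - A real WriteUserEvent call will also need request.user_event
+#   populated; at a minimum an event usually carries an event type and a
+#   pseudo user id (compare the ImportUserEvents samples above). For
+#   example (values illustrative):
+#       request.user_event = discoveryengine_v1.UserEvent(
+#           event_type="view-item",
+#           user_pseudo_id="user-123",
+#       )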
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_write_user_event(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.WriteUserEventRequest( + parent="parent_value", + ) + + # Make the request + response = client.write_user_event(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_WriteUserEvent_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_complete_query_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_complete_query_async.py new file mode 100644 index 000000000000..9a464f9b603a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_complete_query_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_CompletionService_CompleteQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
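+# - "data_store_value" stands in for a full data store resource name,
+#   which typically looks like (path shown for illustration only):
+#       projects/my-project/locations/global/dataStores/my-data-store
+#   and "query_value" is the partial user input to complete, e.g. "sho".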
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_complete_query(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.CompleteQueryRequest( + data_store="data_store_value", + query="query_value", + ) + + # Make the request + response = await client.complete_query(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_CompletionService_CompleteQuery_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_complete_query_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_complete_query_sync.py new file mode 100644 index 000000000000..2b80069aab22 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_complete_query_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_CompletionService_CompleteQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_complete_query(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.CompleteQueryRequest( + data_store="data_store_value", + query="query_value", + ) + + # Make the request + response = client.complete_query(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_CompletionService_CompleteQuery_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_async.py index 0a561746d14b..105bbd49f91d 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_async.py @@ -39,12 +39,8 @@ async def sample_create_document(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.CreateDocumentRequest( parent="parent_value", - document=document, document_id="document_id_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_sync.py index 162638b9ec27..a566a8e696eb 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_create_document_sync.py @@ -39,12 +39,8 @@ def sample_create_document(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.CreateDocumentRequest( parent="parent_value", - document=document, document_id="document_id_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_async.py index 7ebac6fc2723..2c6c6bdd3317 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_async.py @@ -39,11 +39,7 @@ async def sample_import_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) - inline_source = discoveryengine_v1beta.InlineSource() - inline_source.documents.schema_id = 
"schema_id_value" - request = discoveryengine_v1beta.ImportDocumentsRequest( - inline_source=inline_source, parent="parent_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_sync.py index 9bc0e4738550..eb2752c8793c 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_sync.py @@ -39,11 +39,7 @@ def sample_import_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) - inline_source = discoveryengine_v1beta.InlineSource() - inline_source.documents.schema_id = "schema_id_value" - request = discoveryengine_v1beta.ImportDocumentsRequest( - inline_source=inline_source, parent="parent_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py new file mode 100644 index 000000000000..478746526c43 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_purge_documents(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py new file mode 100644 index 000000000000..11f4e0c6a8b0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
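+# - The PurgeDocumentsResponse returned by operation.result() reports how
+#   many documents matched the filter (a purge_count field in the v1beta
+#   surface; verify against your installed library version), e.g.:
+#       print(response.purge_count)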
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_purge_documents(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeDocumentsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_async.py index 8e251602e350..fc8c92463b0b 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_async.py @@ -39,11 +39,7 @@ async def sample_update_document(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.UpdateDocumentRequest( - document=document, ) # Make the request diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_sync.py index ad1b77cd014e..8ea09dccc067 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_update_document_sync.py @@ -39,11 +39,7 @@ def sample_update_document(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) - document = discoveryengine_v1beta.Document() - document.schema_id = "schema_id_value" - request = discoveryengine_v1beta.UpdateDocumentRequest( - document=document, ) # Make the request diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_create_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_create_schema_async.py new file mode 100644 index 000000000000..3dec2056105e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_create_schema_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_CreateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_create_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Make the request + operation = client.create_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_CreateSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_create_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_create_schema_sync.py new file mode 100644 index 000000000000..65cf885526c7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_create_schema_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_CreateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_create_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Make the request + operation = client.create_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_CreateSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_delete_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_delete_schema_async.py new file mode 100644 index 000000000000..737554dc8c58 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_delete_schema_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_DeleteSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
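+# - Deleting a schema is a long-running operation whose final result is
+#   typically an empty message, so the print below mainly confirms that
+#   the operation finished rather than showing a meaningful payload.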
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_delete_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_DeleteSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py new file mode 100644 index 000000000000..e201445858b0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_DeleteSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_delete_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_DeleteSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_get_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_get_schema_async.py new file mode 100644 index 000000000000..6bd37c493725 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_get_schema_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_GetSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
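+# - "name_value" stands in for a full schema resource name, typically of
+#   the form (illustrative only):
+#       projects/my-project/locations/global/dataStores/my-data-store/schemas/my-schema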
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_get_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_GetSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_get_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_get_schema_sync.py new file mode 100644 index 000000000000..695b715c42d5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_get_schema_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_GetSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_get_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_GetSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_list_schemas_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_list_schemas_async.py new file mode 100644 index 000000000000..2fb0c67c23b7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_list_schemas_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_ListSchemas_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
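+# - The async pager returned by list_schemas() fetches further pages
+#   transparently while it is iterated, which is why the results are
+#   consumed with "async for" below rather than a plain for loop.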
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_list_schemas(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_ListSchemas_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py new file mode 100644 index 000000000000..e0eb375fc943 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_ListSchemas_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
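+# - Results can also be consumed a page at a time instead of item by
+#   item, for example:
+#       for page in page_result.pages:
+#           print(len(page.schemas))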
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_list_schemas(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_ListSchemas_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_update_schema_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_update_schema_async.py new file mode 100644 index 000000000000..b4f1197e71b9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_update_schema_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_UpdateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
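+# - The empty UpdateSchemaRequest below is only a skeleton; a real update
+#   needs request.schema set to the schema being replaced, for example
+#   (all field values illustrative):
+#       request = discoveryengine_v1beta.UpdateSchemaRequest(
+#           schema=discoveryengine_v1beta.Schema(
+#               name="projects/my-project/locations/global/dataStores/my-data-store/schemas/my-schema",
+#               json_schema="{}",
+#           ),
+#       )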
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_update_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.UpdateSchemaRequest( + ) + + # Make the request + operation = client.update_schema(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_UpdateSchema_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_update_schema_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_update_schema_sync.py new file mode 100644 index 000000000000..936a4fa489d7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_schema_service_update_schema_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SchemaService_UpdateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_update_schema(): + # Create a client + client = discoveryengine_v1beta.SchemaServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.UpdateSchemaRequest( + ) + + # Make the request + operation = client.update_schema(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SchemaService_UpdateSchema_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_service_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_service_search_async.py new file mode 100644 index 000000000000..860f099f0e03 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_service_search_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Search +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SearchService_Search_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
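+# - A realistic search usually also sets the query, and optionally paging
+#   fields, on the request, for example (values illustrative):
+#       request = discoveryengine_v1beta.SearchRequest(
+#           serving_config="serving_config_value",
+#           query="example query",
+#           page_size=10,
+#       )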
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_search(): + # Create a client + client = discoveryengine_v1beta.SearchServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.SearchRequest( + serving_config="serving_config_value", + ) + + # Make the request + page_result = client.search(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SearchService_Search_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_service_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_service_search_sync.py new file mode 100644 index 000000000000..2d3167fa08b7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_search_service_search_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Search +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SearchService_Search_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
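+# - "serving_config_value" stands in for a full serving config resource
+#   name; a common default (shown for illustration only) is:
+#       projects/my-project/locations/global/dataStores/my-data-store/servingConfigs/default_search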
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_search(): + # Create a client + client = discoveryengine_v1beta.SearchServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.SearchRequest( + serving_config="serving_config_value", + ) + + # Make the request + page_result = client.search(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SearchService_Search_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json new file mode 100644 index 000000000000..e5baf27d3939 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -0,0 +1,2706 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.discoveryengine.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-discoveryengine", + "version": "0.8.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceAsyncClient.complete_query", + "method": { + "fullName": "google.cloud.discoveryengine.v1.CompletionService.CompleteQuery", + "service": { + "fullName": "google.cloud.discoveryengine.v1.CompletionService", + "shortName": "CompletionService" + }, + "shortName": "CompleteQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CompleteQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.CompleteQueryResponse", + "shortName": "complete_query" + }, + "description": "Sample for CompleteQuery", + "file": "discoveryengine_v1_generated_completion_service_complete_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_CompletionService_CompleteQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_completion_service_complete_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceClient", + "shortName": "CompletionServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceClient.complete_query", + "method": { + "fullName": "google.cloud.discoveryengine.v1.CompletionService.CompleteQuery", + "service": { + "fullName": "google.cloud.discoveryengine.v1.CompletionService", + "shortName": 
"CompletionService" + }, + "shortName": "CompleteQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CompleteQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.CompleteQueryResponse", + "shortName": "complete_query" + }, + "description": "Sample for CompleteQuery", + "file": "discoveryengine_v1_generated_completion_service_complete_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_CompletionService_CompleteQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_completion_service_complete_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1_generated_document_service_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "service": { + "fullName": 
"google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1_generated_document_service_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "discoveryengine_v1_generated_document_service_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "service": { + "fullName": 
"google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "discoveryengine_v1_generated_document_service_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "discoveryengine_v1_generated_document_service_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.discoveryengine_v1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "discoveryengine_v1_generated_document_service_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.import_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1_generated_document_service_import_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_import_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.import_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1_generated_document_service_import_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_import_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1_generated_document_service_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1_generated_document_service_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.purge_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1_generated_document_service_purge_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_purge_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.purge_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.operation.Operation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1_generated_document_service_purge_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_purge_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1_generated_document_service_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": 
"discoveryengine_v1_generated_document_service_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_create_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.operation.Operation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_create_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.delete_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "discoveryengine_v1_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_delete_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.delete_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": 
"discoveryengine_v1_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"discoveryengine_v1_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1_generated_schema_service_update_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_UpdateSchema_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_update_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1_generated_schema_service_update_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_UpdateSchema_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_update_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SearchServiceAsyncClient", + "shortName": "SearchServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SearchServiceAsyncClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SearchService.Search", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.search_service.pagers.SearchAsyncPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1_generated_search_service_search_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SearchService_Search_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_search_service_search_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SearchServiceClient", + "shortName": "SearchServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SearchServiceClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SearchService.Search", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.search_service.pagers.SearchPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1_generated_search_service_search_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SearchService_Search_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_search_service_search_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient", + "shortName": "UserEventServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient.collect_user_event", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "CollectUserEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CollectUserEventRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api.httpbody_pb2.HttpBody", + "shortName": "collect_user_event" + }, + "description": "Sample for CollectUserEvent", + "file": "discoveryengine_v1_generated_user_event_service_collect_user_event_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_CollectUserEvent_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_collect_user_event_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient", + "shortName": "UserEventServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient.collect_user_event", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "CollectUserEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CollectUserEventRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api.httpbody_pb2.HttpBody", + "shortName": "collect_user_event" + }, + "description": "Sample for CollectUserEvent", + "file": "discoveryengine_v1_generated_user_event_service_collect_user_event_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_CollectUserEvent_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_collect_user_event_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.discoveryengine_v1.UserEventServiceAsyncClient", + "shortName": "UserEventServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient.import_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.ImportUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "ImportUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportUserEventsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_user_events" + }, + "description": "Sample for ImportUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_import_user_events_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_ImportUserEvents_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_import_user_events_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient", + "shortName": "UserEventServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient.import_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.ImportUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "ImportUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportUserEventsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_user_events" + }, + "description": "Sample for ImportUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_import_user_events_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_ImportUserEvents_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_import_user_events_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient", + "shortName": 
"UserEventServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient.write_user_event", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.WriteUserEvent", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "WriteUserEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.WriteUserEventRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.UserEvent", + "shortName": "write_user_event" + }, + "description": "Sample for WriteUserEvent", + "file": "discoveryengine_v1_generated_user_event_service_write_user_event_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_WriteUserEvent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_write_user_event_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient", + "shortName": "UserEventServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient.write_user_event", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.WriteUserEvent", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "WriteUserEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.WriteUserEventRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.UserEvent", + "shortName": "write_user_event" + }, + "description": "Sample for WriteUserEvent", + "file": "discoveryengine_v1_generated_user_event_service_write_user_event_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_WriteUserEvent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_write_user_event_sync.py" + } + ] +} diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index 
1b60c96ff2b1..a8dda8ffecf0 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -8,9 +8,162 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.3.1" + "version": "0.8.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceAsyncClient.complete_query", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService", + "shortName": "CompletionService" + }, + "shortName": "CompleteQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CompleteQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.CompleteQueryResponse", + "shortName": "complete_query" + }, + "description": "Sample for CompleteQuery", + "file": "discoveryengine_v1beta_generated_completion_service_complete_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_CompletionService_CompleteQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_completion_service_complete_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceClient", + "shortName": "CompletionServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceClient.complete_query", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService.CompleteQuery", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService", + "shortName": "CompletionService" + }, + "shortName": "CompleteQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CompleteQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.CompleteQueryResponse", + "shortName": "complete_query" + }, + "description": "Sample for CompleteQuery", + "file": "discoveryengine_v1beta_generated_completion_service_complete_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_CompletionService_CompleteQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_completion_service_complete_query_sync.py" + }, { "canonical": true, "clientMethod": { @@ -68,12 +221,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_CreateDocument_async", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -83,18 +236,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -156,12 +309,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_CreateDocument_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -171,18 +324,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -549,12 +702,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_ImportDocuments_async", "segments": [ { - "end": 59, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 55, "start": 27, "type": "SHORT" }, @@ -564,18 +717,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -625,12 +778,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_ImportDocuments_sync", "segments": [ { - "end": 59, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 55, "start": 27, "type": "SHORT" }, @@ -640,18 +793,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -826,19 +979,19 @@ "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", "shortName": "DocumentServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.update_document", + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.purge_documents", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments", "service": { "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", "shortName": "DocumentService" }, - "shortName": "UpdateDocument" + "shortName": "PurgeDocuments" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest" }, { "name": 
"retry", @@ -853,22 +1006,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "update_document" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_documents" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1beta_generated_document_service_update_document_async.py", + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1beta_generated_document_service_purge_documents_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_async", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -878,22 +1031,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_update_document_async.py" + "title": "discoveryengine_v1beta_generated_document_service_purge_documents_async.py" }, { "canonical": true, @@ -902,19 +1055,19 @@ "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", "shortName": "DocumentServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.update_document", + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.purge_documents", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments", "service": { "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", "shortName": "DocumentService" }, - "shortName": "UpdateDocument" + "shortName": "PurgeDocuments" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest" }, { "name": "retry", @@ -929,22 +1082,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "update_document" + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_documents" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1beta_generated_document_service_update_document_sync.py", + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1beta_generated_document_service_purge_documents_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_sync", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -954,44 +1107,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": 
"discoveryengine_v1beta_generated_document_service_update_document_sync.py" + "title": "discoveryengine_v1beta_generated_document_service_purge_documents_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient", - "shortName": "RecommendationServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient.recommend", + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.update_document", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService.Recommend", + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService", - "shortName": "RecommendationService" + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" }, - "shortName": "Recommend" + "shortName": "UpdateDocument" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.RecommendRequest" + "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" }, { "name": "retry", @@ -1006,22 +1159,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.RecommendResponse", - "shortName": "recommend" + "resultType": "google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "update_document" }, - "description": "Sample for Recommend", - "file": "discoveryengine_v1beta_generated_recommendation_service_recommend_async.py", + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1beta_generated_document_service_update_document_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_RecommendationService_Recommend_async", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_async", "segments": [ { - "end": 56, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 50, "start": 27, "type": "SHORT" }, @@ -1031,13 +1184,166 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1beta_generated_document_service_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient", + "shortName": "RecommendationServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient.recommend", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService.Recommend", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService", + "shortName": "RecommendationService" + }, + "shortName": "Recommend" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.RecommendRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.RecommendResponse", + "shortName": "recommend" + }, + "description": "Sample for Recommend", + "file": "discoveryengine_v1beta_generated_recommendation_service_recommend_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_RecommendationService_Recommend_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { @@ -1124,6 +1430,972 @@ ], "title": "discoveryengine_v1beta_generated_recommendation_service_recommend_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1beta.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_create_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1beta.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_create_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.delete_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "discoveryengine_v1beta_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_delete_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.delete_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1beta_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1beta_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1beta_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + 
"shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_update_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_UpdateSchema_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_update_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_update_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_UpdateSchema_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_update_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceAsyncClient", + "shortName": "SearchServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceAsyncClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService.Search", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchAsyncPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1beta_generated_search_service_search_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SearchService_Search_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_search_service_search_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceClient", + "shortName": "SearchServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService.Search", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1beta_generated_search_service_search_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SearchService_Search_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_search_service_search_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py new file mode 100644 index 000000000000..ec2b83535813 --- /dev/null +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py @@ -0,0 +1,192 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class discoveryengineCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), + 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', ), + 'create_document': ('parent', 'document', 'document_id', ), + 'create_schema': ('parent', 'schema', 'schema_id', ), + 'delete_document': ('name', ), + 'delete_schema': ('name', ), + 'get_document': ('name', ), + 'get_schema': ('name', ), + 'import_documents': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', 'reconciliation_mode', 'auto_generate_ids', 'id_field', ), + 'import_user_events': ('inline_source', 'gcs_source', 'bigquery_source', 'parent', 'error_config', ), + 'list_documents': ('parent', 'page_size', 'page_token', ), + 'list_schemas': ('parent', 'page_size', 'page_token', ), + 'purge_documents': ('parent', 'filter', 'force', ), + 'search': ('serving_config', 'branch', 'query', 'page_size', 'page_token', 'offset', 'user_info', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', ), + 'update_document': ('document', 'allow_missing', ), + 'update_schema': ('schema', 'allow_missing', ), + 'write_user_event': ('parent', 'user_event', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=discoveryengineCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the discoveryengine client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py index 13c8b0d95edd..09d6d0e15c55 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py @@ -40,14 +40,22 @@ class discoveryengineCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), + 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', ), 'create_document': ('parent', 'document', 'document_id', ), + 'create_schema': ('parent', 'schema', 'schema_id', ), 'delete_document': ('name', ), + 'delete_schema': ('name', ), 'get_document': ('name', ), - 'import_documents': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', 'reconciliation_mode', ), + 'get_schema': ('name', ), + 'import_documents': ('parent', 'inline_source', 
'gcs_source', 'bigquery_source', 'error_config', 'reconciliation_mode', 'auto_generate_ids', 'id_field', ), 'import_user_events': ('inline_source', 'gcs_source', 'bigquery_source', 'parent', 'error_config', ), 'list_documents': ('parent', 'page_size', 'page_token', ), + 'list_schemas': ('parent', 'page_size', 'page_token', ), + 'purge_documents': ('parent', 'filter', 'force', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), + 'search': ('serving_config', 'branch', 'query', 'page_size', 'page_token', 'offset', 'filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', ), 'update_document': ('document', 'allow_missing', ), + 'update_schema': ('schema', 'allow_missing', ), 'write_user_event': ('parent', 'user_event', ), } diff --git a/packages/google-cloud-discoveryengine/setup.py b/packages/google-cloud-discoveryengine/setup.py index 3644c516b955..4d17336da508 100644 --- a/packages/google-cloud-discoveryengine/setup.py +++ b/packages/google-cloud-discoveryengine/setup.py @@ -57,9 +57,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/__init__.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py new file mode 100644 index 000000000000..2ce5fd9c6a49 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py @@ -0,0 +1,2295 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
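The fixup scripts above use the METHOD_TO_PARAMS tables to rewrite positional client-method calls into the single keyword request dict, leaving retry/timeout/metadata as keyword arguments. Invoked with the --input-directory/--output-directory flags parsed above, the transformation looks roughly like this (the resource path is an illustrative placeholder):

    # Before fixup: positional argument matching 'get_schema': ('name', ).
    schema = client.get_schema(
        "projects/my-project/locations/global/dataStores/my-store/schemas/my-schema"
    )

    # After fixup: a single request dict; control params would stay as keywords.
    schema = client.get_schema(
        request={'name': "projects/my-project/locations/global/dataStores/my-store/schemas/my-schema"}
    )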
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.completion_service import ( + CompletionServiceAsyncClient, + CompletionServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import completion_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CompletionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + CompletionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CompletionServiceClient, "grpc"), + (CompletionServiceAsyncClient, "grpc_asyncio"), + (CompletionServiceClient, "rest"), + ], +) +def test_completion_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.CompletionServiceGrpcTransport, "grpc"), + (transports.CompletionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CompletionServiceRestTransport, "rest"), + ], +) +def test_completion_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CompletionServiceClient, "grpc"), + (CompletionServiceAsyncClient, "grpc_asyncio"), + (CompletionServiceClient, "rest"), + ], +) +def test_completion_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_completion_service_client_get_transport_class(): + transport = CompletionServiceClient.get_transport_class() + available_transports = [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceRestTransport, + ] + assert transport in available_transports + + transport = CompletionServiceClient.get_transport_class("grpc") + assert transport == transports.CompletionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CompletionServiceClient, transports.CompletionServiceGrpcTransport, "grpc"), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CompletionServiceClient, transports.CompletionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + CompletionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceClient), +) +@mock.patch.object( + CompletionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceAsyncClient), +) +def test_completion_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CompletionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
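The from_service_account_* tests above pin down both the credentials plumbing and the expected host per transport. Application code uses the same constructors directly; a minimal sketch (the key path is a placeholder):

    from google.cloud import discoveryengine_v1

    # "grpc", "grpc_asyncio" (on the async client), and "rest" are the
    # transport names the parametrized tests above cover.
    client = discoveryengine_v1.CompletionServiceClient.from_service_account_file(
        "path/to/service-account.json", transport="grpc"
    )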
+ with mock.patch.object(CompletionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
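Outside the mocks, the option objects these tests build come from google.api_core.client_options and are passed straight to the client constructor; for example, reusing the test's own placeholder values:

    from google.api_core.client_options import ClientOptions
    from google.cloud import discoveryengine_v1

    options = ClientOptions(
        api_endpoint="squid.clam.whelk",  # overrides DEFAULT_ENDPOINT, as asserted above
        quota_project_id="octopus",       # quota/billing project override
    )
    client = discoveryengine_v1.CompletionServiceClient(client_options=options)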
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + CompletionServiceClient, + transports.CompletionServiceGrpcTransport, + "grpc", + "true", + ), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + CompletionServiceClient, + transports.CompletionServiceGrpcTransport, + "grpc", + "false", + ), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + CompletionServiceClient, + transports.CompletionServiceRestTransport, + "rest", + "true", + ), + ( + CompletionServiceClient, + transports.CompletionServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + CompletionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceClient), +) +@mock.patch.object( + CompletionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_completion_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
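A client_cert_source callback, like client_cert_source_callback defined near the top of this test module, must return a (cert_bytes, key_bytes) tuple; it is only consulted when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". A hedged sketch with illustrative file names:

    from google.api_core.client_options import ClientOptions

    def my_client_cert_source():
        # Load the mTLS client certificate and private key (paths are placeholders).
        with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
            return cert.read(), key.read()

    options = ClientOptions(client_cert_source=my_client_cert_source)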
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [CompletionServiceClient, CompletionServiceAsyncClient] +) +@mock.patch.object( + CompletionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceClient), +) +@mock.patch.object( + CompletionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceAsyncClient), +) +def test_completion_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
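+    # (With the env var set to "true", an explicitly supplied client_cert_source
+    # and api_endpoint should both be passed through unchanged.)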
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CompletionServiceClient, transports.CompletionServiceGrpcTransport, "grpc"), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CompletionServiceClient, transports.CompletionServiceRestTransport, "rest"), + ], +) +def test_completion_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CompletionServiceClient, + transports.CompletionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + CompletionServiceClient, + transports.CompletionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_completion_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_completion_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.completion_service.transports.CompletionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CompletionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CompletionServiceClient, + transports.CompletionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_completion_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
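+    # (load_credentials_from_file is patched, so "credentials.json" never has to
+    # exist on disk; the channel must be created with the file-derived credentials
+    # rather than whatever ADC would return.)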
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + completion_service.CompleteQueryRequest, + dict, + ], +) +def test_complete_query(request_type, transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = completion_service.CompleteQueryResponse() + response = client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == completion_service.CompleteQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, completion_service.CompleteQueryResponse) + + +def test_complete_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + client.complete_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == completion_service.CompleteQueryRequest() + + +@pytest.mark.asyncio +async def test_complete_query_async( + transport: str = "grpc_asyncio", + request_type=completion_service.CompleteQueryRequest, +): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + completion_service.CompleteQueryResponse() + ) + response = await client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == completion_service.CompleteQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, completion_service.CompleteQueryResponse) + + +@pytest.mark.asyncio +async def test_complete_query_async_from_dict(): + await test_complete_query_async(request_type=dict) + + +def test_complete_query_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = completion_service.CompleteQueryRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + call.return_value = completion_service.CompleteQueryResponse() + client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_complete_query_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = completion_service.CompleteQueryRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + completion_service.CompleteQueryResponse() + ) + await client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + completion_service.CompleteQueryRequest, + dict, + ], +) +def test_complete_query_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/dataStores/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = completion_service.CompleteQueryResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = completion_service.CompleteQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.complete_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, completion_service.CompleteQueryResponse) + + +def test_complete_query_rest_required_fields( + request_type=completion_service.CompleteQueryRequest, +): + transport_class = transports.CompletionServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).complete_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["dataStore"] = "data_store_value" + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).complete_query._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "query", + "query_model", + "user_pseudo_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = completion_service.CompleteQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
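+            # (transcode() is patched to return a fixed GET binding with no URI
+            # fields, so required fields that would normally be bound into the
+            # path are expected among the query params asserted below.)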
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = completion_service.CompleteQueryResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.complete_query(request)
+
+            expected_params = [
+                (
+                    "query",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_complete_query_rest_unset_required_fields():
+    transport = transports.CompletionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.complete_query._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "query",
+                "queryModel",
+                "userPseudoId",
+            )
+        )
+        & set(
+            (
+                "dataStore",
+                "query",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_complete_query_rest_interceptors(null_interceptor):
+    transport = transports.CompletionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CompletionServiceRestInterceptor(),
+    )
+    client = CompletionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CompletionServiceRestInterceptor, "post_complete_query"
+    ) as post, mock.patch.object(
+        transports.CompletionServiceRestInterceptor, "pre_complete_query"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = completion_service.CompleteQueryRequest.pb(
+            completion_service.CompleteQueryRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = completion_service.CompleteQueryResponse.to_json(
+            completion_service.CompleteQueryResponse()
+        )
+
+        request = completion_service.CompleteQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = completion_service.CompleteQueryResponse()
+
+        client.complete_query(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_complete_query_rest_bad_request(
+    transport: str = "rest", request_type=completion_service.CompleteQueryRequest
+):
+    client = CompletionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "data_store": "projects/sample1/locations/sample2/dataStores/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
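+    # (A 400 status on the mocked session is expected to be translated into
+    # core_exceptions.BadRequest by google-api-core's HTTP error mapping.)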
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.complete_query(request) + + +def test_complete_query_rest_error(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CompletionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CompletionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + transports.CompletionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CompletionServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CompletionServiceGrpcTransport, + ) + + +def test_completion_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CompletionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_completion_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.completion_service.transports.CompletionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CompletionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "complete_query", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_completion_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.completion_service.transports.CompletionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CompletionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_completion_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.completion_service.transports.CompletionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CompletionServiceTransport() + adc.assert_called_once() + + +def test_completion_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
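+    # (ADC = Application Default Credentials, resolved via google.auth.default();
+    # the assertion also checks that the service's default scope is requested.)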
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CompletionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + ], +) +def test_completion_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + transports.CompletionServiceRestTransport, + ], +) +def test_completion_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CompletionServiceGrpcTransport, grpc_helpers), + (transports.CompletionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_completion_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + ], +) +def test_completion_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_completion_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CompletionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_completion_service_host_no_port(transport_name): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_completion_service_host_with_port(transport_name): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_completion_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CompletionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CompletionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.complete_query._session + session2 = client2.transport.complete_query._session + assert session1 != session2 + + +def test_completion_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.CompletionServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_completion_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CompletionServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.CompletionServiceGrpcTransport,
+        transports.CompletionServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_completion_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
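+# (The supported replacement is ClientOptions.client_cert_source combined with
+# the GOOGLE_API_USE_MTLS_ENDPOINT environment variable, exercised in
+# test_completion_service_client_mtls_env_auto above.)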
+@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + ], +) +def test_completion_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_store_path(): + project = "squid" + location = "clam" + data_store = "whelk" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = CompletionServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = CompletionServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CompletionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CompletionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CompletionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CompletionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CompletionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CompletionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CompletionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = CompletionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CompletionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CompletionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CompletionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CompletionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CompletionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CompletionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
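+        # (Using the client as a context manager should close its transport
+        # on exit.)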
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CompletionServiceClient, transports.CompletionServiceGrpcTransport), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py new file mode 100644 index 000000000000..b5a99ce9d8cf --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py @@ -0,0 +1,5636 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.document_service import ( + DocumentServiceAsyncClient, + DocumentServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1.types import ( + document_service, + import_config, + purge_config, +) +from google.cloud.discoveryengine_v1.types import document +from google.cloud.discoveryengine_v1.types import document as gcd_document + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DocumentServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DocumentServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DocumentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DocumentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DocumentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DocumentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DocumentServiceClient, "grpc"), + (DocumentServiceAsyncClient, "grpc_asyncio"), + (DocumentServiceClient, "rest"), + ], +) +def test_document_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DocumentServiceGrpcTransport, "grpc"), + (transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DocumentServiceRestTransport, "rest"), + ], +) +def test_document_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DocumentServiceClient, "grpc"), + (DocumentServiceAsyncClient, "grpc_asyncio"), + (DocumentServiceClient, "rest"), + ], +) +def test_document_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_document_service_client_get_transport_class(): + transport = DocumentServiceClient.get_transport_class() + available_transports = [ + transports.DocumentServiceGrpcTransport, + transports.DocumentServiceRestTransport, + ] + assert transport in available_transports + + transport = DocumentServiceClient.get_transport_class("grpc") + assert transport == transports.DocumentServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc"), + ( + DocumentServiceAsyncClient, + transports.DocumentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DocumentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DocumentServiceClient), +) +@mock.patch.object( + DocumentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DocumentServiceAsyncClient), +) +def test_document_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DocumentServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DocumentServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DocumentServiceClient, + transports.DocumentServiceGrpcTransport, + "grpc", + "true", + ), + ( + DocumentServiceAsyncClient, + transports.DocumentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DocumentServiceClient, + transports.DocumentServiceGrpcTransport, + "grpc", + "false", + ), + ( + DocumentServiceAsyncClient, + transports.DocumentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DocumentServiceClient, + transports.DocumentServiceRestTransport, + "rest", + "true", + ), + ( + DocumentServiceClient, + transports.DocumentServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DocumentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DocumentServiceClient), +) +@mock.patch.object( + DocumentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DocumentServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_document_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DocumentServiceClient, DocumentServiceAsyncClient] +) +@mock.patch.object( + DocumentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DocumentServiceClient), +) +@mock.patch.object( + DocumentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DocumentServiceAsyncClient), +) +def test_document_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
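# Condensed sketch of the selection rules the mTLS tests in this module walk
# through (pick_endpoint_and_cert is a hypothetical helper, not the client's
# real implementation): the mTLS endpoint inserts "mtls." into the default
# host; the client certificate is only consulted when
# GOOGLE_API_USE_CLIENT_CERTIFICATE is "true"; and GOOGLE_API_USE_MTLS_ENDPOINT
# picks between "never", "always", and cert-driven "auto".
import os


def pick_endpoint_and_cert(default_endpoint, client_cert_source=None):
    mtls_endpoint = default_endpoint.replace("googleapis.com", "mtls.googleapis.com")
    use_cert = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    cert = client_cert_source if use_cert == "true" else None
    if use_mtls == "always" or (use_mtls == "auto" and cert):
        return mtls_endpoint, cert
    return default_endpoint, cert


os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"
endpoint, cert = pick_endpoint_and_cert(
    "discoveryengine.googleapis.com", client_cert_source=lambda: (b"cert", b"key")
)
assert endpoint == "discoveryengine.mtls.googleapis.com" and cert is not None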
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc"), + ( + DocumentServiceAsyncClient, + transports.DocumentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"), + ], +) +def test_document_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DocumentServiceClient, + transports.DocumentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DocumentServiceAsyncClient, + transports.DocumentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest", None), + ], +) +def test_document_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_document_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.document_service.transports.DocumentServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DocumentServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DocumentServiceClient, + transports.DocumentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DocumentServiceAsyncClient, + transports.DocumentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_document_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.GetDocumentRequest, + dict, + ], +) +def test_get_document(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + response = client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.GetDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + client.get_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.GetDocumentRequest() + + +@pytest.mark.asyncio +async def test_get_document_async( + transport: str = "grpc_asyncio", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
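# The "everything is optional in proto3" comments above in practice: every
# field has a zero-value default, so request_type() with no arguments is a
# valid, sendable request. A toy dataclass stands in for a proto-plus
# message here.
from dataclasses import dataclass, field


@dataclass
class _GetThingRequest:  # hypothetical stand-in for a proto3 message
    name: str = ""  # proto3 scalars default to zero values
    labels: dict = field(default_factory=dict)  # maps default to empty


request = _GetThingRequest()
assert request.name == "" and request.labels == {}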
+ with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + ) + ) + response = await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.GetDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +@pytest.mark.asyncio +async def test_get_document_async_from_dict(): + await test_get_document_async(request_type=dict) + + +def test_get_document_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.GetDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = document.Document() + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_document_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.GetDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) + await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_document_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
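# What the "flattened" call style does, sketched with a hypothetical
# get_thing wrapper: keyword arguments are copied onto a fresh request
# object before the transport is invoked, which is exactly what the
# assertions below inspect via args[0].
def get_thing(request=None, *, name=None):
    request = dict(request or {})
    if name is not None:
        request["name"] = name  # the flattened field lands on the request
    return request  # a real client would now hand this to the transport


assert get_thing(name="name_value")["name"] == "name_value"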
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_document_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_document_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_document_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.ListDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
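# Why the async tests wrap return values in
# grpc_helpers_async.FakeUnaryUnaryCall: the async client awaits the stub,
# so the mock must hand back an awaitable. A toy analogue:
import asyncio


class _FakeUnaryUnaryCall:
    # Wraps a pre-baked response so that awaiting the "call" yields it.
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _deliver():
            return self._response

        return _deliver().__await__()


async def _demo():
    return await _FakeUnaryUnaryCall("response")


assert asyncio.run(_demo()) == "response"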
+ with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + client.list_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.ListDocumentsRequest() + + +@pytest.mark.asyncio +async def test_list_documents_async( + transport: str = "grpc_asyncio", request_type=document_service.ListDocumentsRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.ListDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_documents_async_from_dict(): + await test_list_documents_async(request_type=dict) + + +def test_list_documents_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.ListDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = document_service.ListDocumentsResponse() + client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_documents_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.ListDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.ListDocumentsResponse() + ) + await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
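# How the ("x-goog-request-params", "parent=parent_value") entry asserted
# below is built: URI-bound request fields are serialized into a single
# metadata tuple. The real code path is gapic_v1.routing_header (imported
# above); routing_metadata here is a simplified, hypothetical equivalent.
from urllib.parse import quote


def routing_metadata(**fields):
    params = "&".join(f"{k}={quote(str(v), safe='')}" for k, v in fields.items())
    return ("x-goog-request-params", params)


assert routing_metadata(parent="parent_value") == (
    "x-goog-request-params",
    "parent=parent_value",
)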
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_documents_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_documents), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.ListDocumentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_documents(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_documents_flattened_error():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_documents(
+            document_service.ListDocumentsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_documents_flattened_async():
+    client = DocumentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_documents), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.ListDocumentsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            document_service.ListDocumentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_documents(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_documents_flattened_error_async():
+    client = DocumentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_documents(
+            document_service.ListDocumentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_documents_pager(transport_name: str = "grpc"):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_documents), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                    document.Document(),
+                ],
+                next_page_token="abc",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[],
+                next_page_token="def",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                ],
+                next_page_token="ghi",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_documents(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, document.Document) for i in results)
+
+
+def test_list_documents_pages(transport_name: str = "grpc"):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_documents), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                    document.Document(),
+                ],
+                next_page_token="abc",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[],
+                next_page_token="def",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                ],
+                next_page_token="ghi",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_documents(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_documents_async_pager():
+    client = DocumentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                    document.Document(),
+                ],
+                next_page_token="abc",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[],
+                next_page_token="def",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                ],
+                next_page_token="ghi",
+            ),
+            document_service.ListDocumentsResponse(
+                documents=[
+                    document.Document(),
+                    document.Document(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_documents(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, document.Document) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_documents_async_pages():
+    client = DocumentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_documents(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + response = client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.CreateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_create_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + client.create_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.CreateDocumentRequest() + + +@pytest.mark.asyncio +async def test_create_document_async( + transport: str = "grpc_asyncio", request_type=document_service.CreateDocumentRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. 
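# Sketch of the pager contract the four list_documents paging tests above
# exercise, with a toy _Page type: keep issuing the RPC while
# next_page_token is non-empty, and flatten per-page items into one stream.
class _Page:
    def __init__(self, items, token=""):
        self.items = items
        self.next_page_token = token


def iterate_items(fetch_page):
    token = None
    while token != "":  # an empty token marks the final page
        page = fetch_page(token)
        token = page.next_page_token
        yield from page.items


_pages = iter(
    [_Page([1, 2, 3], "abc"), _Page([], "def"), _Page([4], "ghi"), _Page([5, 6])]
)
assert list(iterate_items(lambda _token: next(_pages))) == [1, 2, 3, 4, 5, 6]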
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + ) + ) + response = await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.CreateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +@pytest.mark.asyncio +async def test_create_document_async_from_dict(): + await test_create_document_async(request_type=dict) + + +def test_create_document_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.CreateDocumentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = gcd_document.Document() + client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_document_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.CreateDocumentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_document.Document() + ) + await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_document_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_document.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_document( + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].document + mock_val = gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ) + assert arg == mock_val + arg = args[0].document_id + mock_val = "document_id_value" + assert arg == mock_val + + +def test_create_document_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_document_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_document.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_document.Document() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_document( + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].document + mock_val = gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ) + assert arg == mock_val + arg = args[0].document_id + mock_val = "document_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_document_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.UpdateDocumentRequest, + dict, + ], +) +def test_update_document(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + response = client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.UpdateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_update_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + client.update_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.UpdateDocumentRequest() + + +@pytest.mark.asyncio +async def test_update_document_async( + transport: str = "grpc_asyncio", request_type=document_service.UpdateDocumentRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + ) + ) + response = await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. 
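# The rule behind every *_flattened_error test, as a hypothetical wrapper:
# a pre-built request object and flattened keyword fields are mutually
# exclusive, because merging the two silently would hide caller mistakes.
def call_rpc(request=None, **flattened_fields):
    if request is not None and flattened_fields:
        raise ValueError("Cannot pass both a request object and flattened fields.")
    return request if request is not None else flattened_fields


assert call_rpc(parent="parent_value") == {"parent": "parent_value"}
try:
    call_rpc(request=object(), parent="parent_value")
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError")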
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.UpdateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +@pytest.mark.asyncio +async def test_update_document_async_from_dict(): + await test_update_document_async(request_type=dict) + + +def test_update_document_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.UpdateDocumentRequest() + + request.document.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = document.Document() + client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "document.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_document_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.UpdateDocumentRequest() + + request.document.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) + await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "document.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.DeleteDocumentRequest() + + # Establish that the response is the type that we expect. 
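A note on the field-header assertions: GAPIC clients mirror request fields that appear in the URL into an `x-goog-request-params` metadata entry so the backend can route the call. The real clients build this via `google.api_core.gapic_v1.routing_header`; the helper below is a hypothetical sketch of the `field.path=value` convention only, assuming pairs are joined URL-style:

```python
from urllib.parse import quote

def routing_params(fields):
    # Hypothetical helper: join "field.path=value" pairs with "&".
    value = "&".join(f"{k}={quote(str(v), safe='')}" for k, v in fields.items())
    return ("x-goog-request-params", value)

print(routing_params({"document.name": "name_value"}))
# ('x-goog-request-params', 'document.name=name_value')
```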
+ assert response is None + + +def test_delete_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + client.delete_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.DeleteDocumentRequest() + + +@pytest.mark.asyncio +async def test_delete_document_async( + transport: str = "grpc_asyncio", request_type=document_service.DeleteDocumentRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.DeleteDocumentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_document_async_from_dict(): + await test_delete_document_async(request_type=dict) + + +def test_delete_document_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.DeleteDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = None + client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_document_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.DeleteDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
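`grpc_helpers_async.FakeUnaryUnaryCall` appears throughout the async tests because an async stub's return value is awaited; a plain mock return value would fail at the `await`. A stripped-down awaitable with the same role, assuming only that the client awaits the call object:

```python
import asyncio

class FakeCall:
    """Minimal awaitable standing in for a unary-unary gRPC call."""

    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _respond():
            return self._response
        return _respond().__await__()

async def main():
    # The mocked transport hands back a FakeCall; awaiting it yields
    # the designated response, just like the tests' fake calls.
    assert await FakeCall("response") == "response"

asyncio.run(main())
```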
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_document_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_document_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_document_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_document_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportDocumentsRequest, + dict, + ], +) +def test_import_documents(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_documents(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + client.import_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportDocumentsRequest() + + +@pytest.mark.asyncio +async def test_import_documents_async( + transport: str = "grpc_asyncio", request_type=import_config.ImportDocumentsRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_documents_async_from_dict(): + await test_import_documents_async(request_type=dict) + + +def test_import_documents_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_documents_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
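`import_documents` and `purge_documents` are long-running operations: the transport returns a raw `operations_pb2.Operation`, which the client wraps in a future-like object, hence the `isinstance(response, future.Future)` assertions. The raw message the mocks return can be built directly:

```python
from google.longrunning import operations_pb2

# The same shape the tests designate as the stub's return value.
op = operations_pb2.Operation(name="operations/spam")
print(op.name)  # operations/spam
print(op.done)  # False until the service reports completion
```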
+ with mock.patch.object(type(client.transport.import_documents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeDocumentsRequest, + dict, + ], +) +def test_purge_documents(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + client.purge_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeDocumentsRequest() + + +@pytest.mark.asyncio +async def test_purge_documents_async( + transport: str = "grpc_asyncio", request_type=purge_config.PurgeDocumentsRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeDocumentsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_documents_async_from_dict(): + await test_purge_documents_async(request_type=dict) + + +def test_purge_documents_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_documents_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.GetDocumentRequest, + dict, + ], +) +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. 
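The REST tests fabricate responses by serializing the expected proto to JSON and stuffing the bytes into `response._content`, mirroring what the server would send. The same round trip with a stock well-known type; `Struct` stands in here for `document.Document`:

```python
from google.protobuf import json_format, struct_pb2

msg = struct_pb2.Struct()
msg.update({"key_value": None})           # same shape as the tests' struct_data
payload = json_format.MessageToJson(msg)  # what goes into response._content
parsed = json_format.Parse(payload, struct_pb2.Struct())
assert parsed["key_value"] is None        # round trip preserves the null value
```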
+ assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
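The interceptor tests mock `pre_get_document`/`post_get_document` on `DocumentServiceRestInterceptor` and verify each hook fires exactly once. In real use you would subclass that interceptor; the stand-alone class below only mirrors the pre/post shape, with names and signatures simplified for illustration:

```python
class LoggingInterceptor:
    """Illustrative stand-in for a DocumentServiceRestInterceptor subclass."""

    def pre_get_document(self, request, metadata):
        # Runs before the HTTP call; may rewrite the request or metadata.
        print("fetching:", request)
        return request, metadata

    def post_get_document(self, response):
        # Runs on the successful response before it reaches the caller.
        print("fetched:", response)
        return response

interceptor = LoggingInterceptor()
req, meta = interceptor.pre_get_document({"name": "doc-1"}, [("key", "val")])
resp = interceptor.post_get_document({"name": "doc-1"})
```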
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_documents(request) + + # Establish that the response is the type that we expect. 
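`path_template.validate` (from `google.api_core`) is how the flattened REST tests confirm the final URL matches the method's HTTP rule: `*` matches a single path segment and `{name=...}` binds a variable to a sub-pattern. A quick stand-alone check against a made-up resource pattern:

```python
from google.api_core import path_template

# Made-up template/path pair, shaped like the ones asserted in the tests.
assert path_template.validate(
    "v1/{name=projects/*/documents/*}",
    "v1/projects/sample1/documents/sample2",
)
# A path that strays from the template fails validation.
assert not path_template.validate(
    "v1/{name=projects/*/documents/*}",
    "v1/projects/sample1/other/sample2",
)
```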
+ assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_service.ListDocumentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
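The required-fields machinery leans on a property of proto JSON: with `including_default_value_fields=False`, `MessageToJson` omits any field still at its default, so an unset required field simply vanishes from `jsonified_request` ("verify fields with default values are dropped"). Demonstrated with a stock protobuf message; `Api` is only a convenient stand-in:

```python
import json
from google.protobuf import api_pb2, json_format

msg = api_pb2.Api()                                   # "name" left at its default
jsonified = json.loads(json_format.MessageToJson(msg))
assert "name" not in jsonified                        # default fields are dropped

msg.name = "name_value"
jsonified = json.loads(json_format.MessageToJson(msg))
assert jsonified["name"] == "name_value"              # non-defaults survive
```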
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_documents_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_documents_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_list_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_list_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) + + request = document_service.ListDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_service.ListDocumentsResponse() + + client.list_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_documents(request) + + +def test_list_documents_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.ListDocumentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + % client.transport._host, + args[1], + ) + + +def test_list_documents_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcd_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_document(request) + + # Establish that the response is the type that we expect. 
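The pager test above stitches four fake pages together through `next_page_token` and expects six documents in total. The chaining logic under test is just a fetch-until-empty-token loop, sketched here without the generated pager classes:

```python
pages = {
    "": (["d1", "d2", "d3"], "abc"),   # first page: 3 docs, token "abc"
    "abc": ([], "def"),                # an empty page still advances the token
    "def": (["d4"], "ghi"),
    "ghi": (["d5", "d6"], ""),         # empty token terminates iteration
}

def iterate(fetch, token=""):
    while True:
        items, token = fetch(token)
        yield from items
        if not token:
            return

assert len(list(iterate(lambda t: pages[t]))) == 6  # matches the test's count
```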
+ assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["document_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "documentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("document_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
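Note the name flip in this test: the request field is `document_id` in Python but `documentId` once jsonified, because proto JSON uses lowerCamelCase field names. The same conversion with a stock message; `Api.source_context` is used only because it is a handy two-word field:

```python
from google.protobuf import api_pb2, json_format

msg = api_pb2.Api(name="name_value")
msg.source_context.file_name = "sample.proto"  # snake_case in Python
payload = json_format.MessageToJson(msg)
assert '"sourceContext"' in payload            # lowerCamelCase on the wire
assert '"fileName"' in payload
```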
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcd_document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_document(request) + + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("documentId",)) + & set( + ( + "parent", + "document", + "documentId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_create_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_create_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_document.Document.to_json( + gcd_document.Document() + ) + + request = document_service.CreateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_document.Document() + + client.create_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + } + request = request_type(**request_init) + + # Mock the http 
+    # request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_document(request)
+
+
+def test_create_document_rest_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcd_document.Document()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            document=gcd_document.Document(
+                struct_data=struct_pb2.Struct(
+                    fields={
+                        "key_value": struct_pb2.Value(
+                            null_value=struct_pb2.NullValue.NULL_VALUE
+                        )
+                    }
+                )
+            ),
+            document_id="document_id_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gcd_document.Document.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_document(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_document_rest_flattened_error(transport: str = "rest"):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) + + +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.UpdateDocumentRequest, + dict, + ], +) +def test_update_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
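The `_get_unset_required_fields` assertions scattered through these tests all take the same shape: the set of query parameters the transport fills with defaults, intersected with the set of fields the API marks required. Spelled out with plain sets:

```python
# update_document: allowMissing is defaulted but not required -> empty set.
assert {"allowMissing"} & {"document"} == set()

# create_document: documentId is both defaulted and required -> it survives.
assert {"documentId"} & {"parent", "document", "documentId"} == {"documentId"}

# get_document / delete_document: nothing is defaulted -> trivially empty.
assert set() & {"name"} == set()
```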
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing",)) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_update_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_update_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.UpdateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.update_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + } + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_document(request) + + +def test_update_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_document(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
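+            # Same transcode stub as above, but DELETE carries no request
+            # body, so the canned result below omits the "body" key.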
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_delete_document" + ) as pre: + pre.assert_not_called() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = document_service.DeleteDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service.DeleteDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_document(request) + + +def test_delete_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
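+        # DeleteDocument has no response payload, so the mocked HTTP body
+        # below is just an empty string.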
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", + ) + + +def test_delete_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportDocumentsRequest, + dict, + ], +) +def test_import_documents_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_documents(request) + + # Establish that the response is the type that we expect. 
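+    # ImportDocuments is a long-running operation, so the REST stub returns a
+    # raw Operation proto and the check below inspects its name.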
+ assert response.operation.name == "operations/spam" + + +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
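+            # ImportDocuments transcodes to an http POST with a body, so the
+            # stub below supplies both "query_params" and "body".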
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_documents_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_documents_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = import_config.ImportDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_documents_rest_bad_request( + transport: str = "rest", request_type=import_config.ImportDocumentsRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
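+    # A bare 400 status should be enough for the transport to raise
+    # core_exceptions.BadRequest; no error payload is needed.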
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_documents(request) + + +def test_import_documents_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeDocumentsRequest, + dict, + ], +) +def test_purge_documents_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_documents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
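+    # PurgeDocuments requires both "parent" and "filter"; the assertions
+    # above have already pinned both to non-default values.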
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_documents_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_documents_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_purge_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_purge_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_documents(request) + + +def test_purge_documents_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DocumentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DocumentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DocumentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DocumentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DocumentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DocumentServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DocumentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DocumentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DocumentServiceGrpcTransport, + transports.DocumentServiceGrpcAsyncIOTransport, + transports.DocumentServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
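+    # Patching google.auth.default simulates Application Default Credentials
+    # so the test never consults a real credential source.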
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DocumentServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DocumentServiceGrpcTransport, + ) + + +def test_document_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DocumentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_document_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.document_service.transports.DocumentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DocumentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_document", + "list_documents", + "create_document", + "update_document", + "delete_document", + "import_documents", + "purge_documents", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_document_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.document_service.transports.DocumentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DocumentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_document_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
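+    # _prep_wrapped_messages is stubbed out so constructing the base
+    # transport does not attempt to wrap methods with retry/timeout config.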
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.document_service.transports.DocumentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DocumentServiceTransport() + adc.assert_called_once() + + +def test_document_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DocumentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DocumentServiceGrpcTransport, + transports.DocumentServiceGrpcAsyncIOTransport, + ], +) +def test_document_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DocumentServiceGrpcTransport, + transports.DocumentServiceGrpcAsyncIOTransport, + transports.DocumentServiceRestTransport, + ], +) +def test_document_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DocumentServiceGrpcTransport, grpc_helpers), + (transports.DocumentServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_document_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
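+    # create_channel is patched as well, so the assertion below can inspect
+    # the exact endpoint, scopes, and gRPC options used to build the channel.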
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "discoveryengine.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="discoveryengine.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DocumentServiceGrpcTransport,
+        transports.DocumentServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_document_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_document_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.DocumentServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_document_service_rest_lro_client():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
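+    # (the property caches the operations client, so the identity check
+    # below is enough to verify memoization)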
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_document_service_host_no_port(transport_name):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_document_service_host_with_port(transport_name):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_document_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DocumentServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DocumentServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_document._session
+    session2 = client2.transport.get_document._session
+    assert session1 != session2
+    session1 = client1.transport.list_documents._session
+    session2 = client2.transport.list_documents._session
+    assert session1 != session2
+    session1 = client1.transport.create_document._session
+    session2 = client2.transport.create_document._session
+    assert session1 != session2
+    session1 = client1.transport.update_document._session
+    session2 = client2.transport.update_document._session
+    assert session1 != session2
+    session1 = client1.transport.delete_document._session
+    session2 = client2.transport.delete_document._session
+    assert session1 != session2
+    session1 = client1.transport.import_documents._session
+    session2 = client2.transport.import_documents._session
+    assert session1 != session2
+    session1 = client1.transport.purge_documents._session
+    session2 = client2.transport.purge_documents._session
+    assert session1 != session2
+
+
+def test_document_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DocumentServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_document_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
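+    # (a caller-supplied channel is expected to bypass ADC lookup and TLS
+    # setup entirely)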
+    transport = transports.DocumentServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DocumentServiceGrpcTransport,
+        transports.DocumentServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_document_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DocumentServiceGrpcTransport,
+        transports.DocumentServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_document_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_document_service_grpc_lro_client():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
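+    # (gRPC transports expose operations_v1.OperationsClient here, while the
+    # rest transport above uses operations_v1.AbstractOperationsClient)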
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_document_service_grpc_lro_async_client():
+    client = DocumentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_branch_path():
+    project = "squid"
+    location = "clam"
+    data_store = "whelk"
+    branch = "octopus"
+    expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}".format(
+        project=project,
+        location=location,
+        data_store=data_store,
+        branch=branch,
+    )
+    actual = DocumentServiceClient.branch_path(project, location, data_store, branch)
+    assert expected == actual
+
+
+def test_parse_branch_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "data_store": "cuttlefish",
+        "branch": "mussel",
+    }
+    path = DocumentServiceClient.branch_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentServiceClient.parse_branch_path(path)
+    assert expected == actual
+
+
+def test_document_path():
+    project = "winkle"
+    location = "nautilus"
+    data_store = "scallop"
+    branch = "abalone"
+    document = "squid"
+    expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format(
+        project=project,
+        location=location,
+        data_store=data_store,
+        branch=branch,
+        document=document,
+    )
+    actual = DocumentServiceClient.document_path(
+        project, location, data_store, branch, document
+    )
+    assert expected == actual
+
+
+def test_parse_document_path():
+    expected = {
+        "project": "clam",
+        "location": "whelk",
+        "data_store": "octopus",
+        "branch": "oyster",
+        "document": "nudibranch",
+    }
+    path = DocumentServiceClient.document_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentServiceClient.parse_document_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "cuttlefish"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = DocumentServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = DocumentServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "winkle"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = DocumentServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "nautilus",
+    }
+    path = DocumentServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = DocumentServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DocumentServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = DocumentServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DocumentServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = DocumentServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = DocumentServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DocumentServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DocumentServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = DocumentServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DocumentServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DocumentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DocumentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DocumentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
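+    # Patching requests.Session at the class level intercepts the call no
+    # matter which session instance the operations mixin uses.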
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
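+    # The x-goog-request-params header carries the routing parameters derived
+    # from request.name ("locations" here).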
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DocumentServiceClient, transports.DocumentServiceGrpcTransport), + (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py new file mode 100644 index 000000000000..02b7127f5ff8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py @@ -0,0 +1,4735 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.schema_service import ( + SchemaServiceAsyncClient, + SchemaServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1.types import schema +from google.cloud.discoveryengine_v1.types import schema as gcd_schema +from google.cloud.discoveryengine_v1.types import schema_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SchemaServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), + ], +) +def test_schema_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SchemaServiceGrpcTransport, "grpc"), + (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SchemaServiceRestTransport, "rest"), + ], +) +def test_schema_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), + ], +) +def test_schema_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) 
+ assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_schema_service_client_get_transport_class(): + transport = SchemaServiceClient.get_transport_class() + available_transports = [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceRestTransport, + ] + assert transport in available_transports + + transport = SchemaServiceClient.get_transport_class("grpc") + assert transport == transports.SchemaServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +def test_schema_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "true"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "false"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "true"), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_schema_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
+    # Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=True,
+            ):
+                with mock.patch(
+                    "google.auth.transport.mtls.default_client_cert_source",
+                    return_value=client_cert_source_callback,
+                ):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), + ], +) +def test_schema_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", None), + ], +) +def test_schema_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_schema_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.schema_service.transports.SchemaServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SchemaServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_schema_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.GetSchemaRequest, + dict, + ], +) +def test_get_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema( + name="name_value", + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + response = client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.GetSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + + +def test_get_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + client.get_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.GetSchemaRequest() + + +@pytest.mark.asyncio +async def test_get_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.GetSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + ) + ) + response = await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.GetSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_schema_async_from_dict(): + await test_get_schema_async(request_type=dict) + + +def test_get_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.GetSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = schema.Schema() + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.GetSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_schema( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_schema( + schema_service.GetSchemaRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_schema( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_schema( + schema_service.GetSchemaRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.ListSchemasRequest, + dict, + ], +) +def test_list_schemas(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema_service.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + response = client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.ListSchemasRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schemas_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + client.list_schemas() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.ListSchemasRequest() + + +@pytest.mark.asyncio +async def test_list_schemas_async( + transport: str = "grpc_asyncio", request_type=schema_service.ListSchemasRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema_service.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.ListSchemasRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_schemas_async_from_dict(): + await test_list_schemas_async(request_type=dict) + + +def test_list_schemas_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.ListSchemasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = schema_service.ListSchemasResponse() + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_schemas_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.ListSchemasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema_service.ListSchemasResponse() + ) + await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_schemas_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema_service.ListSchemasResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_schemas( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_schemas_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schemas( + schema_service.ListSchemasRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_schemas_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema_service.ListSchemasResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema_service.ListSchemasResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_schemas( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_schemas_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_schemas( + schema_service.ListSchemasRequest(), + parent="parent_value", + ) + + +def test_list_schemas_pager(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema_service.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_schemas(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + +def test_list_schemas_pages(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema_service.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = list(client.list_schemas(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_schemas_async_pager(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema_service.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_schemas( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, schema.Schema) for i in responses) + + +@pytest.mark.asyncio +async def test_list_schemas_async_pages(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema_service.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_schemas(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.CreateSchemaRequest, + dict, + ], +) +def test_create_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.CreateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + client.create_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.CreateSchemaRequest() + + +@pytest.mark.asyncio +async def test_create_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.CreateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.CreateSchemaRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_schema_async_from_dict(): + await test_create_schema_async(request_type=dict) + + +def test_create_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.CreateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.CreateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_schema( + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ) + assert arg == mock_val + arg = args[0].schema_id + mock_val = "schema_id_value" + assert arg == mock_val + + +def test_create_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_schema( + schema_service.CreateSchemaRequest(), + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_schema( + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ) + assert arg == mock_val + arg = args[0].schema_id + mock_val = "schema_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_schema( + schema_service.CreateSchemaRequest(), + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.UpdateSchemaRequest, + dict, + ], +) +def test_update_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.UpdateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + client.update_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.UpdateSchemaRequest() + + +@pytest.mark.asyncio +async def test_update_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.UpdateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.UpdateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_schema_async_from_dict(): + await test_update_schema_async(request_type=dict) + + +def test_update_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.UpdateSchemaRequest() + + request.schema.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
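+    # For UpdateSchema the routing parameter is taken from the nested
+    # schema.name field rather than a top-level request field.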
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "schema.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.UpdateSchemaRequest() + + request.schema.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "schema.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.DeleteSchemaRequest, + dict, + ], +) +def test_delete_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + client.delete_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.DeleteSchemaRequest() + + +@pytest.mark.asyncio +async def test_delete_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.DeleteSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
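+        # FakeUnaryUnaryCall wraps the response so the mocked stub can be
+        # awaited like a real grpc.aio unary-unary call.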
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_schema_async_from_dict(): + await test_delete_schema_async(request_type=dict) + + +def test_delete_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.DeleteSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.DeleteSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_schema( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
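+    # The ValueError is raised client-side, before any RPC is attempted.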
+    with pytest.raises(ValueError):
+        client.delete_schema(
+            schema_service.DeleteSchemaRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_schema(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_schema(
+            schema_service.DeleteSchemaRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        schema_service.GetSchemaRequest,
+        dict,
+    ],
+)
+def test_get_schema_rest(request_type):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = schema.Schema(
+            name="name_value",
+            struct_schema=struct_pb2.Struct(
+                fields={
+                    "key_value": struct_pb2.Value(
+                        null_value=struct_pb2.NullValue.NULL_VALUE
+                    )
+                }
+            ),
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = schema.Schema.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_schema(request)
+
+    # Establish that the response is the type that we expect.
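+    # The JSON payload set on the mocked session round-trips back into a
+    # schema.Schema message.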
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + + +def test_get_schema_rest_required_fields(request_type=schema_service.GetSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
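+            # Faking the transcode result also keeps the test independent of
+            # the real URI template for this method.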
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_get_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema_service.GetSchemaRequest.pb( + schema_service.GetSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schema.Schema.to_json(schema.Schema()) + + request = schema_service.GetSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + + client.get_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_schema_rest_bad_request( + transport: str = "rest", request_type=schema_service.GetSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_schema(request) + + +def test_get_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
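+    # The path_template.validate check below confirms that the flattened
+    # "name" argument was expanded into the expected REST URI.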
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/schemas/*}" + % client.transport._host, + args[1], + ) + + +def test_get_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_schema( + schema_service.GetSchemaRequest(), + name="name_value", + ) + + +def test_get_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.ListSchemasRequest, + dict, + ], +) +def test_list_schemas_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema_service.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema_service.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_schemas(request) + + # Establish that the response is the type that we expect. 
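+    # list_schemas wraps the response in a pager that can fetch further pages.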
+ assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schemas_rest_required_fields( + request_type=schema_service.ListSchemasRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema_service.ListSchemasResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema_service.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_schemas(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_schemas_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_schemas._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_schemas_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_list_schemas" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema_service.ListSchemasRequest.pb( + schema_service.ListSchemasRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schema_service.ListSchemasResponse.to_json( + schema_service.ListSchemasResponse() + ) + + request = schema_service.ListSchemasRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema_service.ListSchemasResponse() + + client.list_schemas( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_schemas_rest_bad_request( + transport: str = "rest", request_type=schema_service.ListSchemasRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
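+    # A 400 status from the mocked session surfaces as
+    # core_exceptions.BadRequest.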
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_schemas(request) + + +def test_list_schemas_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema_service.ListSchemasResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema_service.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_schemas(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/schemas" + % client.transport._host, + args[1], + ) + + +def test_list_schemas_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schemas( + schema_service.ListSchemasRequest(), + parent="parent_value", + ) + + +def test_list_schemas_rest_pager(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema_service.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + schema_service.ListSchemasResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + pager = client.list_schemas(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + pages = list(client.list_schemas(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.CreateSchemaRequest, + dict, + ], +) +def test_create_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["schema"] = { + "struct_schema": {"fields": {}}, + "json_schema": "json_schema_value", + "name": "name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_schema(request) + + # Establish that the response is the type that we expect. 
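+    # CreateSchema is long-running: the client returns an operation future
+    # whose underlying Operation carries the mocked name.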
+ assert response.operation.name == "operations/spam" + + +def test_create_schema_rest_required_fields( + request_type=schema_service.CreateSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["schema_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "schemaId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "schemaId" in jsonified_request + assert jsonified_request["schemaId"] == request_init["schema_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["schemaId"] = "schema_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("schema_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "schemaId" in jsonified_request + assert jsonified_request["schemaId"] == "schema_id_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
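+            # schema_id is a required query parameter, so it appears in
+            # expected_params below even when left empty.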
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_schema(request) + + expected_params = [ + ( + "schemaId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("schemaId",)) + & set( + ( + "parent", + "schema", + "schemaId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_create_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema_service.CreateSchemaRequest.pb( + schema_service.CreateSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = schema_service.CreateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_schema_rest_bad_request( + transport: str = "rest", request_type=schema_service.CreateSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["schema"] = { + "struct_schema": {"fields": {}}, + "json_schema": "json_schema_value", + "name": "name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_schema(request) + + +def test_create_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*}/schemas" + % client.transport._host, + args[1], + ) + + +def test_create_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_schema( + schema_service.CreateSchemaRequest(), + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + + +def test_create_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.UpdateSchemaRequest, + dict, + ], +) +def test_update_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "schema": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + } + request_init["schema"] = { + "struct_schema": {"fields": {}}, + "json_schema": "json_schema_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
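+        # The Operation below is serialized with MessageToJson to fake the
+        # HTTP payload the API would return.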
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_schema(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_schema_rest_required_fields( + request_type=schema_service.UpdateSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_schema._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
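+            # UpdateSchema transcodes to an HTTP PATCH carrying a request body.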
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing",)) & set(("schema",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_update_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_update_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema_service.UpdateSchemaRequest.pb( + schema_service.UpdateSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = schema_service.UpdateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_schema_rest_bad_request( + transport: str = "rest", request_type=schema_service.UpdateSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "schema": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + } + request_init["schema"] = { + "struct_schema": {"fields": {}}, + "json_schema": "json_schema_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_schema(request) + + +def test_update_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.DeleteSchemaRequest, + dict, + ], +) +def test_delete_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_schema(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_schema_rest_required_fields( + request_type=schema_service.DeleteSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
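+            # DeleteSchema transcodes to an HTTP DELETE with no request body.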
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_delete_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_delete_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema_service.DeleteSchemaRequest.pb( + schema_service.DeleteSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = schema_service.DeleteSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_schema_rest_bad_request( + transport: str = "rest", request_type=schema_service.DeleteSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_schema(request) + + +def test_delete_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/schemas/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_schema( + schema_service.DeleteSchemaRequest(), + name="name_value", + ) + + +def test_delete_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
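+    # A mock options object with api_key set is enough to trigger the
+    # mutual-exclusion check against explicit credentials.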
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SchemaServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SchemaServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SchemaServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SchemaServiceGrpcTransport, + ) + + +def test_schema_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_schema_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.schema_service.transports.SchemaServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
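+    # These are the abstract surface methods that the concrete gRPC,
+    # gRPC-asyncio, and REST transports each override.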
+ methods = ( + "get_schema", + "list_schemas", + "create_schema", + "update_schema", + "delete_schema", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_schema_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_schema_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport() + adc.assert_called_once() + + +def test_schema_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SchemaServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
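+    # (ADC here is Application Default Credentials: with nothing explicit
+    # passed, google-auth falls back to google.auth.default(), which returns
+    # a (credentials, project_id) tuple, e.g.
+    #
+    #     creds, project = google.auth.default(
+    #         scopes=("https://www.googleapis.com/auth/cloud-platform",)
+    #     )
+    # )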
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_schema_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SchemaServiceGrpcTransport, grpc_helpers), + (transports.SchemaServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_schema_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
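+    # (client_cert_source_for_mtls is a callback returning (cert_bytes,
+    # key_bytes); the transport is expected to feed that pair into
+    # grpc.ssl_channel_credentials, which the block below asserts.)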
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_schema_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.SchemaServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_schema_service_rest_lro_client():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_schema_service_host_no_port(transport_name):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_schema_service_host_with_port(transport_name):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_schema_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = SchemaServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = SchemaServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_schema._session
+    session2 = client2.transport.get_schema._session
+    assert session1 != session2
+    session1 = client1.transport.list_schemas._session
+    session2 = client2.transport.list_schemas._session
+    assert session1 != session2
+    session1 = client1.transport.create_schema._session
+    session2 = client2.transport.create_schema._session
+    assert session1 != session2
+    session1 = client1.transport.update_schema._session
+    session2 = client2.transport.update_schema._session
+    assert session1 != session2
+    session1 = client1.transport.delete_schema._session
+    session2 = client2.transport.delete_schema._session
+    assert session1 != session2
+
+
+def test_schema_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SchemaServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_schema_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SchemaServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SchemaServiceGrpcTransport,
+        transports.SchemaServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_schema_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
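+# (The non-deprecated equivalents, ssl_channel_credentials and
+# client_cert_source_for_mtls, are covered by
+# test_schema_service_grpc_transport_client_cert_source_for_mtls above.)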
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SchemaServiceGrpcTransport,
+        transports.SchemaServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_schema_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_schema_service_grpc_lro_client():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_schema_service_grpc_lro_async_client():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_store_path():
+    project = "squid"
+    location = "clam"
+    data_store = "whelk"
+    expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+        project=project,
+        location=location,
+        data_store=data_store,
+    )
+    actual = SchemaServiceClient.data_store_path(project, location, data_store)
+    assert expected == actual
+
+
+def test_parse_data_store_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "data_store": "nudibranch",
+    }
+    path = SchemaServiceClient.data_store_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = SchemaServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_schema_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + schema = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/schemas/{schema}".format( + project=project, + location=location, + data_store=data_store, + schema=schema, + ) + actual = SchemaServiceClient.schema_path(project, location, data_store, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "schema": "clam", + } + path = SchemaServiceClient.schema_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_schema_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SchemaServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SchemaServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SchemaServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SchemaServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SchemaServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SchemaServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SchemaServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SchemaServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SchemaServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SchemaServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SchemaServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SchemaServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
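+    # (The REST transport maps HTTP status codes onto google.api_core
+    # exceptions, so a mocked 400 response should surface as
+    # core_exceptions.BadRequest, matching the pytest.raises below.)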
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
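+    # (GAPIC clients propagate URI path parameters as the
+    # x-goog-request-params gRPC metadata entry, stored as a (key, value)
+    # tuple, which is what the membership check below inspects.)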
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
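+        # (Entering and exiting the client as a context manager should
+        # close its transport exactly once, on exit.)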
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py new file mode 100644 index 000000000000..1e370f38c6dc --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py @@ -0,0 +1,2567 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.search_service import ( + SearchServiceAsyncClient, + SearchServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1.types import common, search_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SearchServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SearchServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchServiceClient, "grpc"), + (SearchServiceAsyncClient, "grpc_asyncio"), + (SearchServiceClient, "rest"), + ], +) +def test_search_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.SearchServiceGrpcTransport, "grpc"), + (transports.SearchServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SearchServiceRestTransport, "rest"), + ], +) +def test_search_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchServiceClient, "grpc"), + (SearchServiceAsyncClient, "grpc_asyncio"), + (SearchServiceClient, "rest"), + ], +) +def test_search_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_search_service_client_get_transport_class(): + transport = SearchServiceClient.get_transport_class() + available_transports = [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceRestTransport, + ] + assert transport in available_transports + + transport = SearchServiceClient.get_transport_class("grpc") + assert transport == transports.SearchServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc"), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SearchServiceClient, transports.SearchServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SearchServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceClient), +) +@mock.patch.object( + SearchServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceAsyncClient), +) +def test_search_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SearchServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SearchServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc", "true"),
+        (
+            SearchServiceAsyncClient,
+            transports.SearchServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc", "false"),
+        (
+            SearchServiceAsyncClient,
+            transports.SearchServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (SearchServiceClient, transports.SearchServiceRestTransport, "rest", "true"),
+        (SearchServiceClient, transports.SearchServiceRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    SearchServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(SearchServiceClient),
+)
+@mock.patch.object(
+    SearchServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(SearchServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_search_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
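+    # (Expected matrix, per the assertions that follow:
+    #   GOOGLE_API_USE_CLIENT_CERTIFICATE="true" with a cert available
+    #       -> mTLS endpoint plus that client cert;
+    #   GOOGLE_API_USE_CLIENT_CERTIFICATE="false"
+    #       -> default endpoint and no client cert.)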
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SearchServiceClient, SearchServiceAsyncClient] +) +@mock.patch.object( + SearchServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceClient), +) +@mock.patch.object( + SearchServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceAsyncClient), +) +def test_search_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc"), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SearchServiceClient, transports.SearchServiceRestTransport, "rest"), + ], +) +def test_search_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchServiceClient, + transports.SearchServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SearchServiceClient, transports.SearchServiceRestTransport, "rest", None), + ], +) +def test_search_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_search_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_service.transports.SearchServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SearchServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchServiceClient, + transports.SearchServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_search_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
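+    # (google.auth.load_credentials_from_file returns a
+    # (credentials, project_id) tuple, which is why the mock below is primed
+    # with (file_creds, None).)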
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_service.SearchRequest, + dict, + ], +) +def test_search(request_type, transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = search_service.SearchResponse( + total_size=1086, + attribution_token="attribution_token_value", + next_page_token="next_page_token_value", + corrected_query="corrected_query_value", + ) + response = client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == search_service.SearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchPager) + assert response.total_size == 1086 + assert response.attribution_token == "attribution_token_value" + assert response.next_page_token == "next_page_token_value" + assert response.corrected_query == "corrected_query_value" + + +def test_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + client.search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_service.SearchRequest() + + +@pytest.mark.asyncio +async def test_search_async( + transport: str = "grpc_asyncio", request_type=search_service.SearchRequest +): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_service.SearchResponse( + total_size=1086, + attribution_token="attribution_token_value", + next_page_token="next_page_token_value", + corrected_query="corrected_query_value", + ) + ) + response = await client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == search_service.SearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAsyncPager) + assert response.total_size == 1086 + assert response.attribution_token == "attribution_token_value" + assert response.next_page_token == "next_page_token_value" + assert response.corrected_query == "corrected_query_value" + + +@pytest.mark.asyncio +async def test_search_async_from_dict(): + await test_search_async(request_type=dict) + + +def test_search_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_service.SearchRequest() + + request.serving_config = "serving_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + call.return_value = search_service.SearchResponse() + client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config=serving_config_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_service.SearchRequest() + + request.serving_config = "serving_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_service.SearchResponse() + ) + await client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config=serving_config_value", + ) in kw["metadata"] + + +def test_search_pager(transport_name: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("serving_config", ""),)), + ) + pager = client.search(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, search_service.SearchResponse.SearchResult) for i in results + ) + + +def test_search_pages(transport_name: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_async_pager(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
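+        # Same staged sequence as the sync pager tests: 3 + 0 + 1 + 2 results
+        # across four pages, with an empty middle page to confirm paging
+        # continues past pages that carry no results.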
+ call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, search_service.SearchResponse.SearchResult) for i in responses + ) + + +@pytest.mark.asyncio +async def test_search_async_pages(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.search(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + search_service.SearchRequest, + dict, + ], +) +def test_search_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = search_service.SearchResponse( + total_size=1086, + attribution_token="attribution_token_value", + next_page_token="next_page_token_value", + corrected_query="corrected_query_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = search_service.SearchResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchPager) + assert response.total_size == 1086 + assert response.attribution_token == "attribution_token_value" + assert response.next_page_token == "next_page_token_value" + assert response.corrected_query == "corrected_query_value" + + +def test_search_rest_required_fields(request_type=search_service.SearchRequest): + transport_class = transports.SearchServiceRestTransport + + request_init = {} + request_init["serving_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["servingConfig"] = "serving_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "servingConfig" in jsonified_request + assert jsonified_request["servingConfig"] == "serving_config_value" + + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = search_service.SearchResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = search_service.SearchResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_rest_unset_required_fields(): + transport = transports.SearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("servingConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_rest_interceptors(null_interceptor): + transport = transports.SearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchServiceRestInterceptor(), + ) + client = SearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SearchServiceRestInterceptor, "post_search" + ) as post, mock.patch.object( + transports.SearchServiceRestInterceptor, "pre_search" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_service.SearchRequest.pb(search_service.SearchRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = search_service.SearchResponse.to_json( + search_service.SearchResponse() + ) + + request = search_service.SearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = search_service.SearchResponse() + + client.search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_rest_bad_request( + transport: str = "rest", request_type=search_service.SearchRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search(request) + + +def test_search_rest_pager(transport: str = "rest"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(search_service.SearchResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + + pager = client.search(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, search_service.SearchResponse.SearchResult) for i in results + ) + + pages = list(client.search(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SearchServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SearchServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + transports.SearchServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SearchServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SearchServiceGrpcTransport, + ) + + +def test_search_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SearchServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_search_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_service.transports.SearchServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SearchServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "search", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_search_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.search_service.transports.SearchServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_search_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.search_service.transports.SearchServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchServiceTransport() + adc.assert_called_once() + + +def test_search_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SearchServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + ], +) +def test_search_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + transports.SearchServiceRestTransport, + ], +) +def test_search_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SearchServiceGrpcTransport, grpc_helpers), + (transports.SearchServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_search_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + ], +) +def test_search_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_search_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.SearchServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_search_service_host_no_port(transport_name):
+    client = SearchServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_search_service_host_with_port(transport_name):
+    client = SearchServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_search_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = SearchServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = SearchServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.search._session
+    session2 = client2.transport.search._session
+    assert session1 != session2
+
+
+def test_search_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SearchServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_search_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
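+    # Same expectations as the sync variant above: the supplied channel is
+    # adopted as-is and no SSL channel credentials are derived for it.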
+    transport = transports.SearchServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SearchServiceGrpcTransport,
+        transports.SearchServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_search_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
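+# Unlike the test above, the ADC variant below passes client_cert_source=None,
+# so the mTLS channel credentials come from google.auth's SslCredentials.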
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + ], +) +def test_search_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_branch_path(): + project = "squid" + location = "clam" + data_store = "whelk" + branch = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}".format( + project=project, + location=location, + data_store=data_store, + branch=branch, + ) + actual = SearchServiceClient.branch_path(project, location, data_store, branch) + assert expected == actual + + +def test_parse_branch_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "branch": "mussel", + } + path = SearchServiceClient.branch_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_branch_path(path) + assert expected == actual + + +def test_document_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + branch = "abalone" + document = "squid" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format( + project=project, + location=location, + data_store=data_store, + branch=branch, + document=document, + ) + actual = SearchServiceClient.document_path( + project, location, data_store, branch, document + ) + assert expected == actual + + +def test_parse_document_path(): + expected = { + "project": "clam", + "location": "whelk", + "data_store": "octopus", + "branch": "oyster", + "document": "nudibranch", + } + path = SearchServiceClient.document_path(**expected) + + # Check that the path construction is reversible. 
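+    # parse_document_path should invert document_path, recovering exactly
+    # the kwargs used to build the path.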
+ actual = SearchServiceClient.parse_document_path(path) + assert expected == actual + + +def test_serving_config_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + serving_config = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format( + project=project, + location=location, + data_store=data_store, + serving_config=serving_config, + ) + actual = SearchServiceClient.serving_config_path( + project, location, data_store, serving_config + ) + assert expected == actual + + +def test_parse_serving_config_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "serving_config": "clam", + } + path = SearchServiceClient.serving_config_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_serving_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SearchServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SearchServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SearchServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SearchServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SearchServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SearchServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SearchServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SearchServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
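+    # As with the resource-specific helpers above, every common_* path
+    # helper is expected to round-trip without loss.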
+ actual = SearchServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SearchServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SearchServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SearchServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SearchServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SearchServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
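+    # operations_pb2.Operation is a raw protobuf message rather than a
+    # proto-plus type, so the isinstance check is the whole assertion.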
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
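+        # As in the search tests, the awaitable is faked with
+        # FakeUnaryUnaryCall so the async stub runs without a live channel.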
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
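+        # Passing the request as a plain dict exercises dict-to-proto request
+        # coercion; call.assert_called() is the only expectation.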
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
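+    # "name=locations" must surface in the x-goog-request-params metadata so
+    # the backend can route the request; the final assertion checks that.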
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
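+        # Exiting the client context manager should close the transport;
+        # close is patched so nothing real is torn down.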
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SearchServiceClient, transports.SearchServiceGrpcTransport), + (SearchServiceAsyncClient, transports.SearchServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py new file mode 100644 index 000000000000..20e6590450a3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py @@ -0,0 +1,3321 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.user_event_service import ( + UserEventServiceAsyncClient, + UserEventServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import ( + common, + import_config, + user_event, + user_event_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
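+# For reference, the mapping exercised by test__get_default_mtls_endpoint below
+# inserts an "mtls" label after the service name (illustrative examples only):
+#
+#     "example.googleapis.com"         -> "example.mtls.googleapis.com"
+#     "example.sandbox.googleapis.com" -> "example.mtls.sandbox.googleapis.com"
+#     "api.example.com"                -> unchanged (non-googleapis hosts pass through)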
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert UserEventServiceClient._get_default_mtls_endpoint(None) is None + assert ( + UserEventServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + UserEventServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + UserEventServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + UserEventServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + UserEventServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (UserEventServiceClient, "grpc"), + (UserEventServiceAsyncClient, "grpc_asyncio"), + (UserEventServiceClient, "rest"), + ], +) +def test_user_event_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.UserEventServiceGrpcTransport, "grpc"), + (transports.UserEventServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.UserEventServiceRestTransport, "rest"), + ], +) +def test_user_event_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (UserEventServiceClient, "grpc"), + (UserEventServiceAsyncClient, "grpc_asyncio"), + (UserEventServiceClient, "rest"), + ], +) +def test_user_event_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_user_event_service_client_get_transport_class(): + transport = UserEventServiceClient.get_transport_class() + available_transports = [ + transports.UserEventServiceGrpcTransport, + transports.UserEventServiceRestTransport, + ] + assert transport in available_transports + + transport = UserEventServiceClient.get_transport_class("grpc") + assert transport == transports.UserEventServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (UserEventServiceClient, transports.UserEventServiceGrpcTransport, "grpc"), + ( + UserEventServiceAsyncClient, + transports.UserEventServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (UserEventServiceClient, transports.UserEventServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + UserEventServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(UserEventServiceClient), +) +@mock.patch.object( + UserEventServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(UserEventServiceAsyncClient), +) +def test_user_event_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(UserEventServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(UserEventServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
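+    # ("never", "auto", and "always" are the values recognized here; "always"
+    # forces DEFAULT_MTLS_ENDPOINT, and anything else raises
+    # MutualTLSChannelError, as checked further below.)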
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            UserEventServiceClient,
+            transports.UserEventServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            UserEventServiceAsyncClient,
+            transports.UserEventServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            UserEventServiceClient,
+            transports.UserEventServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            UserEventServiceAsyncClient,
+            transports.UserEventServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            UserEventServiceClient,
+            transports.UserEventServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            UserEventServiceClient,
+            transports.UserEventServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    UserEventServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(UserEventServiceClient),
+)
+@mock.patch.object(
+    UserEventServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(UserEventServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_user_event_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=True,
+            ):
+                with mock.patch(
+                    "google.auth.transport.mtls.default_client_cert_source",
+                    return_value=client_cert_source_callback,
+                ):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
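+    # (has_default_client_cert_source is mocked to return False below, so the
+    # client must fall back to DEFAULT_ENDPOINT with no client certificate,
+    # whatever GOOGLE_API_USE_CLIENT_CERTIFICATE says.)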
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [UserEventServiceClient, UserEventServiceAsyncClient] +) +@mock.patch.object( + UserEventServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(UserEventServiceClient), +) +@mock.patch.object( + UserEventServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(UserEventServiceAsyncClient), +) +def test_user_event_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
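+    # (With a discoverable default certificate, "auto" is expected to resolve
+    # to DEFAULT_MTLS_ENDPOINT and to surface the mocked certificate source.)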
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (UserEventServiceClient, transports.UserEventServiceGrpcTransport, "grpc"), + ( + UserEventServiceAsyncClient, + transports.UserEventServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (UserEventServiceClient, transports.UserEventServiceRestTransport, "rest"), + ], +) +def test_user_event_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + UserEventServiceClient, + transports.UserEventServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + UserEventServiceAsyncClient, + transports.UserEventServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + UserEventServiceClient, + transports.UserEventServiceRestTransport, + "rest", + None, + ), + ], +) +def test_user_event_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
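+    # (The path below is forwarded to the transport verbatim; nothing is read
+    # from disk in this test because the transport __init__ is mocked out.)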
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_user_event_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.user_event_service.transports.UserEventServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = UserEventServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + UserEventServiceClient, + transports.UserEventServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + UserEventServiceAsyncClient, + transports.UserEventServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_user_event_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
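+    # (Both load_credentials_from_file and ADC are mocked, so the test can
+    # assert that create_channel receives the file-based credentials rather
+    # than the application-default ones.)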
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + user_event_service.WriteUserEventRequest, + dict, + ], +) +def test_write_user_event(request_type, transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.write_user_event), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = user_event.UserEvent( + event_type="event_type_value", + user_pseudo_id="user_pseudo_id_value", + direct_user_request=True, + session_id="session_id_value", + attribution_token="attribution_token_value", + filter="filter_value", + tag_ids=["tag_ids_value"], + promotion_ids=["promotion_ids_value"], + ) + response = client.write_user_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == user_event_service.WriteUserEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, user_event.UserEvent) + assert response.event_type == "event_type_value" + assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.direct_user_request is True + assert response.session_id == "session_id_value" + assert response.attribution_token == "attribution_token_value" + assert response.filter == "filter_value" + assert response.tag_ids == ["tag_ids_value"] + assert response.promotion_ids == ["promotion_ids_value"] + + +def test_write_user_event_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
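+    # (An argument-free write_user_event() must still synthesize a default
+    # WriteUserEventRequest, which the assertion below verifies.)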
+ with mock.patch.object(type(client.transport.write_user_event), "__call__") as call: + client.write_user_event() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user_event_service.WriteUserEventRequest() + + +@pytest.mark.asyncio +async def test_write_user_event_async( + transport: str = "grpc_asyncio", + request_type=user_event_service.WriteUserEventRequest, +): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.write_user_event), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user_event.UserEvent( + event_type="event_type_value", + user_pseudo_id="user_pseudo_id_value", + direct_user_request=True, + session_id="session_id_value", + attribution_token="attribution_token_value", + filter="filter_value", + tag_ids=["tag_ids_value"], + promotion_ids=["promotion_ids_value"], + ) + ) + response = await client.write_user_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == user_event_service.WriteUserEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, user_event.UserEvent) + assert response.event_type == "event_type_value" + assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.direct_user_request is True + assert response.session_id == "session_id_value" + assert response.attribution_token == "attribution_token_value" + assert response.filter == "filter_value" + assert response.tag_ids == ["tag_ids_value"] + assert response.promotion_ids == ["promotion_ids_value"] + + +@pytest.mark.asyncio +async def test_write_user_event_async_from_dict(): + await test_write_user_event_async(request_type=dict) + + +def test_write_user_event_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user_event_service.WriteUserEventRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.write_user_event), "__call__") as call: + call.return_value = user_event.UserEvent() + client.write_user_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_write_user_event_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
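+    # (The header in question is the x-goog-request-params routing metadata,
+    # e.g. "parent=parent_value", which lets the backend route the request by
+    # resource name.)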
+ request = user_event_service.WriteUserEventRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.write_user_event), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + user_event.UserEvent() + ) + await client.write_user_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + user_event_service.CollectUserEventRequest, + dict, + ], +) +def test_collect_user_event(request_type, transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.collect_user_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = httpbody_pb2.HttpBody( + content_type="content_type_value", + data=b"data_blob", + ) + response = client.collect_user_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == user_event_service.CollectUserEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, httpbody_pb2.HttpBody) + assert response.content_type == "content_type_value" + assert response.data == b"data_blob" + + +def test_collect_user_event_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.collect_user_event), "__call__" + ) as call: + client.collect_user_event() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == user_event_service.CollectUserEventRequest() + + +@pytest.mark.asyncio +async def test_collect_user_event_async( + transport: str = "grpc_asyncio", + request_type=user_event_service.CollectUserEventRequest, +): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.collect_user_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + httpbody_pb2.HttpBody( + content_type="content_type_value", + data=b"data_blob", + ) + ) + response = await client.collect_user_event(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == user_event_service.CollectUserEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, httpbody_pb2.HttpBody) + assert response.content_type == "content_type_value" + assert response.data == b"data_blob" + + +@pytest.mark.asyncio +async def test_collect_user_event_async_from_dict(): + await test_collect_user_event_async(request_type=dict) + + +def test_collect_user_event_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user_event_service.CollectUserEventRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.collect_user_event), "__call__" + ) as call: + call.return_value = httpbody_pb2.HttpBody() + client.collect_user_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_collect_user_event_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = user_event_service.CollectUserEventRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.collect_user_event), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + httpbody_pb2.HttpBody() + ) + await client.collect_user_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportUserEventsRequest, + dict, + ], +) +def test_import_user_events(request_type, transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportUserEventsRequest() + + # Establish that the response is the type that we expect. 
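+    # (import_user_events is a long-running operation: the transport returns a
+    # raw operations_pb2.Operation, which the client wraps in an operation
+    # future.)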
+ assert isinstance(response, future.Future) + + +def test_import_user_events_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_user_events), "__call__" + ) as call: + client.import_user_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportUserEventsRequest() + + +@pytest.mark.asyncio +async def test_import_user_events_async( + transport: str = "grpc_asyncio", request_type=import_config.ImportUserEventsRequest +): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportUserEventsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_user_events_async_from_dict(): + await test_import_user_events_async(request_type=dict) + + +def test_import_user_events_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_user_events), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_user_events_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_user_events), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + user_event_service.WriteUserEventRequest, + dict, + ], +) +def test_write_user_event_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["user_event"] = { + "event_type": "event_type_value", + "user_pseudo_id": "user_pseudo_id_value", + "event_time": {"seconds": 751, "nanos": 543}, + "user_info": {"user_id": "user_id_value", "user_agent": "user_agent_value"}, + "direct_user_request": True, + "session_id": "session_id_value", + "page_info": { + "pageview_id": "pageview_id_value", + "page_category": "page_category_value", + "uri": "uri_value", + "referrer_uri": "referrer_uri_value", + }, + "attribution_token": "attribution_token_value", + "filter": "filter_value", + "documents": [ + { + "id": "id_value", + "name": "name_value", + "quantity": 895, + "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + } + ], + "panel": { + "panel_id": "panel_id_value", + "display_name": "display_name_value", + "panel_position": 1508, + "total_panels": 1286, + }, + "search_info": { + "search_query": "search_query_value", + "order_by": "order_by_value", + "offset": 647, + }, + "completion_info": { + "selected_suggestion": "selected_suggestion_value", + "selected_position": 1821, + }, + "transaction_info": { + "value": 0.541, + "currency": "currency_value", + "transaction_id": "transaction_id_value", + "tax": 0.333, + "cost": 0.441, + "discount_value": 0.1509, + }, + "tag_ids": ["tag_ids_value1", "tag_ids_value2"], + "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "attributes": {}, + "media_info": { + "media_progress_duration": {"seconds": 751, "nanos": 543}, + "media_progress_percentage": 0.2641, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = user_event.UserEvent( + event_type="event_type_value", + user_pseudo_id="user_pseudo_id_value", + direct_user_request=True, + session_id="session_id_value", + attribution_token="attribution_token_value", + filter="filter_value", + tag_ids=["tag_ids_value"], + promotion_ids=["promotion_ids_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = user_event.UserEvent.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.write_user_event(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, user_event.UserEvent) + assert response.event_type == "event_type_value" + assert response.user_pseudo_id == "user_pseudo_id_value" + assert response.direct_user_request is True + assert response.session_id == "session_id_value" + assert response.attribution_token == "attribution_token_value" + assert response.filter == "filter_value" + assert response.tag_ids == ["tag_ids_value"] + assert response.promotion_ids == ["promotion_ids_value"] + + +def test_write_user_event_rest_required_fields( + request_type=user_event_service.WriteUserEventRequest, +): + transport_class = transports.UserEventServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).write_user_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).write_user_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = user_event.UserEvent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
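+            # ("v1/sample_method" is a placeholder URI, not the real
+            # writeUserEvent mapping; with transcode stubbed out, only the
+            # query/body split asserted at the end of this test reaches the
+            # mocked session.)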
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = user_event.UserEvent.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.write_user_event(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_write_user_event_rest_unset_required_fields(): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.write_user_event._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "userEvent", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_write_user_event_rest_interceptors(null_interceptor): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserEventServiceRestInterceptor(), + ) + client = UserEventServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UserEventServiceRestInterceptor, "post_write_user_event" + ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, "pre_write_user_event" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = user_event_service.WriteUserEventRequest.pb( + user_event_service.WriteUserEventRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = user_event.UserEvent.to_json(user_event.UserEvent()) + + request = user_event_service.WriteUserEventRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = user_event.UserEvent() + + client.write_user_event( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_write_user_event_rest_bad_request( + transport: str = "rest", request_type=user_event_service.WriteUserEventRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["user_event"] = { + "event_type": "event_type_value", + "user_pseudo_id": "user_pseudo_id_value", + "event_time": {"seconds": 751, "nanos": 543}, + "user_info": {"user_id": "user_id_value", "user_agent": "user_agent_value"}, + "direct_user_request": True, + "session_id": "session_id_value", + "page_info": { + "pageview_id": "pageview_id_value", + "page_category": "page_category_value", + "uri": "uri_value", + "referrer_uri": "referrer_uri_value", + }, + "attribution_token": 
"attribution_token_value", + "filter": "filter_value", + "documents": [ + { + "id": "id_value", + "name": "name_value", + "quantity": 895, + "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + } + ], + "panel": { + "panel_id": "panel_id_value", + "display_name": "display_name_value", + "panel_position": 1508, + "total_panels": 1286, + }, + "search_info": { + "search_query": "search_query_value", + "order_by": "order_by_value", + "offset": 647, + }, + "completion_info": { + "selected_suggestion": "selected_suggestion_value", + "selected_position": 1821, + }, + "transaction_info": { + "value": 0.541, + "currency": "currency_value", + "transaction_id": "transaction_id_value", + "tax": 0.333, + "cost": 0.441, + "discount_value": 0.1509, + }, + "tag_ids": ["tag_ids_value1", "tag_ids_value2"], + "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "attributes": {}, + "media_info": { + "media_progress_duration": {"seconds": 751, "nanos": 543}, + "media_progress_percentage": 0.2641, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.write_user_event(request) + + +def test_write_user_event_rest_error(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + user_event_service.CollectUserEventRequest, + dict, + ], +) +def test_collect_user_event_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = httpbody_pb2.HttpBody( + content_type="content_type_value", + data=b"data_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.collect_user_event(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, httpbody_pb2.HttpBody) + assert response.content_type == "content_type_value" + assert response.data == b"data_blob" + + +def test_collect_user_event_rest_required_fields( + request_type=user_event_service.CollectUserEventRequest, +): + transport_class = transports.UserEventServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["user_event"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "userEvent" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).collect_user_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "userEvent" in jsonified_request + assert jsonified_request["userEvent"] == request_init["user_event"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["userEvent"] = "user_event_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).collect_user_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "ets", + "uri", + "user_event", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "userEvent" in jsonified_request + assert jsonified_request["userEvent"] == "user_event_value" + + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = httpbody_pb2.HttpBody() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
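+            # (collect_user_event transcodes to a GET here, so no body is
+            # attached and the required userEvent field must surface in the
+            # query params, as the expected_params assertion checks.)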
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.collect_user_event(request) + + expected_params = [ + ( + "userEvent", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_collect_user_event_rest_unset_required_fields(): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.collect_user_event._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "ets", + "uri", + "userEvent", + ) + ) + & set( + ( + "parent", + "userEvent", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_collect_user_event_rest_interceptors(null_interceptor): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserEventServiceRestInterceptor(), + ) + client = UserEventServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.UserEventServiceRestInterceptor, "post_collect_user_event" + ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, "pre_collect_user_event" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = user_event_service.CollectUserEventRequest.pb( + user_event_service.CollectUserEventRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(httpbody_pb2.HttpBody()) + + request = user_event_service.CollectUserEventRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = httpbody_pb2.HttpBody() + + client.collect_user_event( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_collect_user_event_rest_bad_request( + transport: str = "rest", request_type=user_event_service.CollectUserEventRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.collect_user_event(request) + + +def test_collect_user_event_rest_error(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportUserEventsRequest, + dict, + ], +) +def test_import_user_events_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_user_events(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_import_user_events_rest_required_fields( + request_type=import_config.ImportUserEventsRequest, +): + transport_class = transports.UserEventServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
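+        # Note: unlike the collect RPC above (a GET whose fields travel in the
+        # query string), import transcodes to a POST, so the stubbed result
+        # below also carries the request as the "body".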
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.import_user_events(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_import_user_events_rest_unset_required_fields():
+    transport = transports.UserEventServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.import_user_events._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "inlineSource",
+                "gcsSource",
+                "bigquerySource",
+                "parent",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_import_user_events_rest_interceptors(null_interceptor):
+    transport = transports.UserEventServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.UserEventServiceRestInterceptor(),
+    )
+    client = UserEventServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.UserEventServiceRestInterceptor, "post_import_user_events"
+    ) as post, mock.patch.object(
+        transports.UserEventServiceRestInterceptor, "pre_import_user_events"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = import_config.ImportUserEventsRequest.pb(
+            import_config.ImportUserEventsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = import_config.ImportUserEventsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.import_user_events(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_import_user_events_rest_bad_request(
+    transport: str = "rest", request_type=import_config.ImportUserEventsRequest
+):
+    client = UserEventServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_user_events(request) + + +def test_import_user_events_rest_error(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.UserEventServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.UserEventServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UserEventServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.UserEventServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = UserEventServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = UserEventServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.UserEventServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UserEventServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.UserEventServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = UserEventServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.UserEventServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.UserEventServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UserEventServiceGrpcTransport, + transports.UserEventServiceGrpcAsyncIOTransport, + transports.UserEventServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
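+    # google.auth.default() returns a (credentials, project_id) tuple, hence the
+    # 2-tuple below with the project left as None. Outside of tests this is
+    # roughly equivalent to:
+    #     creds, _ = google.auth.default()
+    #     transport = transport_class(credentials=creds)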
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = UserEventServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.UserEventServiceGrpcTransport, + ) + + +def test_user_event_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.UserEventServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_user_event_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.user_event_service.transports.UserEventServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.UserEventServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "write_user_event", + "collect_user_event", + "import_user_events", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_user_event_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.user_event_service.transports.UserEventServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UserEventServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_user_event_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.user_event_service.transports.UserEventServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UserEventServiceTransport() + adc.assert_called_once() + + +def test_user_event_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + UserEventServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UserEventServiceGrpcTransport, + transports.UserEventServiceGrpcAsyncIOTransport, + ], +) +def test_user_event_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.UserEventServiceGrpcTransport, + transports.UserEventServiceGrpcAsyncIOTransport, + transports.UserEventServiceRestTransport, + ], +) +def test_user_event_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.UserEventServiceGrpcTransport, grpc_helpers), + (transports.UserEventServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_user_event_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
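+    # The channel options asserted below lift gRPC's default 4 MB message-size
+    # caps; -1 means "unlimited" for both send and receive.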
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "discoveryengine.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="discoveryengine.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.UserEventServiceGrpcTransport,
+        transports.UserEventServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_user_event_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_user_event_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.UserEventServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_user_event_service_rest_lro_client():
+    client = UserEventServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_user_event_service_host_no_port(transport_name):
+    client = UserEventServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_user_event_service_host_with_port(transport_name):
+    client = UserEventServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_user_event_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = UserEventServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = UserEventServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.write_user_event._session
+    session2 = client2.transport.write_user_event._session
+    assert session1 != session2
+    session1 = client1.transport.collect_user_event._session
+    session2 = client2.transport.collect_user_event._session
+    assert session1 != session2
+    session1 = client1.transport.import_user_events._session
+    session2 = client2.transport.import_user_events._session
+    assert session1 != session2
+
+
+def test_user_event_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.UserEventServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_user_event_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.UserEventServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.UserEventServiceGrpcTransport,
+        transports.UserEventServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_user_event_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.UserEventServiceGrpcTransport,
+        transports.UserEventServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_user_event_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_user_event_service_grpc_lro_client():
+    client = UserEventServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_user_event_service_grpc_lro_async_client():
+    client = UserEventServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_store_path():
+    project = "squid"
+    location = "clam"
+    data_store = "whelk"
+    expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+        project=project,
+        location=location,
+        data_store=data_store,
+    )
+    actual = UserEventServiceClient.data_store_path(project, location, data_store)
+    assert expected == actual
+
+
+def test_parse_data_store_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "data_store": "nudibranch",
+    }
+    path = UserEventServiceClient.data_store_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = UserEventServiceClient.parse_data_store_path(path)
+    assert expected == actual
+
+
+def test_document_path():
+    project = "cuttlefish"
+    location = "mussel"
+    data_store = "winkle"
+    branch = "nautilus"
+    document = "scallop"
+    expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format(
+        project=project,
+        location=location,
+        data_store=data_store,
+        branch=branch,
+        document=document,
+    )
+    actual = UserEventServiceClient.document_path(
+        project, location, data_store, branch, document
+    )
+    assert expected == actual
+
+
+def test_parse_document_path():
+    expected = {
+        "project": "abalone",
+        "location": "squid",
+        "data_store": "clam",
+        "branch": "whelk",
+        "document": "octopus",
+    }
+    path = UserEventServiceClient.document_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = UserEventServiceClient.parse_document_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "oyster"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = UserEventServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "nudibranch",
+    }
+    path = UserEventServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = UserEventServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "cuttlefish"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = UserEventServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "mussel",
+    }
+    path = UserEventServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = UserEventServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = UserEventServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = UserEventServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = UserEventServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = UserEventServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = UserEventServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = UserEventServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = UserEventServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = UserEventServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = UserEventServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.UserEventServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.UserEventServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = UserEventServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
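+    # The header is sent as "x-goog-request-params" metadata in the form
+    # "name=<value>", mirroring the path the value would occupy in a REST URI.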
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
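+        # The client is a context manager whose __exit__ closes the underlying
+        # transport, which is what the mocked close() verifies.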
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (UserEventServiceClient, transports.UserEventServiceGrpcTransport), + (UserEventServiceAsyncClient, transports.UserEventServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py new file mode 100644 index 000000000000..64a50bed6459 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py @@ -0,0 +1,2307 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.completion_service import ( + CompletionServiceAsyncClient, + CompletionServiceClient, + transports, +) +from google.cloud.discoveryengine_v1beta.types import completion_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CompletionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + CompletionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CompletionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CompletionServiceClient, "grpc"), + (CompletionServiceAsyncClient, "grpc_asyncio"), + (CompletionServiceClient, "rest"), + ], +) +def test_completion_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.CompletionServiceGrpcTransport, "grpc"), + (transports.CompletionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CompletionServiceRestTransport, "rest"), + ], +) +def test_completion_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CompletionServiceClient, "grpc"), + (CompletionServiceAsyncClient, "grpc_asyncio"), + (CompletionServiceClient, "rest"), + ], +) +def test_completion_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_completion_service_client_get_transport_class(): + transport = CompletionServiceClient.get_transport_class() + available_transports = [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceRestTransport, + ] + assert transport in available_transports + + transport = CompletionServiceClient.get_transport_class("grpc") + assert transport == transports.CompletionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CompletionServiceClient, transports.CompletionServiceGrpcTransport, "grpc"), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CompletionServiceClient, transports.CompletionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + CompletionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceClient), +) +@mock.patch.object( + CompletionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceAsyncClient), +) +def test_completion_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CompletionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
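+    # (A transport instance above bypasses the factory entirely; only a
+    # transport name goes through get_transport_class.)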
+ with mock.patch.object(CompletionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
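+    # Only the strings "true" and "false" are valid here; any other value should
+    # fail fast with a ValueError before any transport is constructed.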
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            CompletionServiceClient,
+            transports.CompletionServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            CompletionServiceAsyncClient,
+            transports.CompletionServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            CompletionServiceClient,
+            transports.CompletionServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            CompletionServiceAsyncClient,
+            transports.CompletionServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            CompletionServiceClient,
+            transports.CompletionServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            CompletionServiceClient,
+            transports.CompletionServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    CompletionServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(CompletionServiceClient),
+)
+@mock.patch.object(
+    CompletionServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(CompletionServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_completion_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. The endpoint is autoswitched to
+    # the default mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and
+    # a client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
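+    # With GOOGLE_API_USE_MTLS_ENDPOINT pinned to "auto" above, the expectation is
+    # two-fold: the cert callback is forwarded to the transport, and the host flips
+    # to DEFAULT_MTLS_ENDPOINT only when the cert env var is "true".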
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [CompletionServiceClient, CompletionServiceAsyncClient] +) +@mock.patch.object( + CompletionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceClient), +) +@mock.patch.object( + CompletionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CompletionServiceAsyncClient), +) +def test_completion_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
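+    # get_mtls_endpoint_and_cert_source is the classmethod applications can call
+    # directly to preview the endpoint/cert decision, e.g. (illustrative):
+    #   endpoint, source = CompletionServiceClient.get_mtls_endpoint_and_cert_source(opts)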
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CompletionServiceClient, transports.CompletionServiceGrpcTransport, "grpc"), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CompletionServiceClient, transports.CompletionServiceRestTransport, "rest"), + ], +) +def test_completion_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CompletionServiceClient, + transports.CompletionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + CompletionServiceClient, + transports.CompletionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_completion_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_completion_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.completion_service.transports.CompletionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CompletionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CompletionServiceClient, + transports.CompletionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_completion_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
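+    # load_credentials_from_file is patched below, so "credentials.json" never has
+    # to exist on disk; the assertion only cares that the file-derived credentials
+    # (file_creds) are the ones handed to create_channel.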
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + completion_service.CompleteQueryRequest, + dict, + ], +) +def test_complete_query(request_type, transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = completion_service.CompleteQueryResponse() + response = client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == completion_service.CompleteQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, completion_service.CompleteQueryResponse) + + +def test_complete_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + client.complete_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == completion_service.CompleteQueryRequest() + + +@pytest.mark.asyncio +async def test_complete_query_async( + transport: str = "grpc_asyncio", + request_type=completion_service.CompleteQueryRequest, +): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + completion_service.CompleteQueryResponse() + ) + response = await client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == completion_service.CompleteQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, completion_service.CompleteQueryResponse) + + +@pytest.mark.asyncio +async def test_complete_query_async_from_dict(): + await test_complete_query_async(request_type=dict) + + +def test_complete_query_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = completion_service.CompleteQueryRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + call.return_value = completion_service.CompleteQueryResponse() + client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_complete_query_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = completion_service.CompleteQueryRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.complete_query), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + completion_service.CompleteQueryResponse() + ) + await client.complete_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + completion_service.CompleteQueryRequest, + dict, + ], +) +def test_complete_query_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/dataStores/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = completion_service.CompleteQueryResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = completion_service.CompleteQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.complete_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, completion_service.CompleteQueryResponse) + + +def test_complete_query_rest_required_fields( + request_type=completion_service.CompleteQueryRequest, +): + transport_class = transports.CompletionServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).complete_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["dataStore"] = "data_store_value" + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).complete_query._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "query", + "query_model", + "user_pseudo_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = completion_service.CompleteQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
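+            # The transcode stub below returns the minimal shape the REST layer
+            # consumes; a real transcoding for CompleteQuery would carry the
+            # data_store path in the URI rather than the placeholder used here.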
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = completion_service.CompleteQueryResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.complete_query(request)
+
+            expected_params = [
+                (
+                    "query",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_complete_query_rest_unset_required_fields():
+    transport = transports.CompletionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.complete_query._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "query",
+                "queryModel",
+                "userPseudoId",
+            )
+        )
+        & set(
+            (
+                "dataStore",
+                "query",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_complete_query_rest_interceptors(null_interceptor):
+    transport = transports.CompletionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CompletionServiceRestInterceptor(),
+    )
+    client = CompletionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CompletionServiceRestInterceptor, "post_complete_query"
+    ) as post, mock.patch.object(
+        transports.CompletionServiceRestInterceptor, "pre_complete_query"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = completion_service.CompleteQueryRequest.pb(
+            completion_service.CompleteQueryRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = completion_service.CompleteQueryResponse.to_json(
+            completion_service.CompleteQueryResponse()
+        )
+
+        request = completion_service.CompleteQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = completion_service.CompleteQueryResponse()
+
+        client.complete_query(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_complete_query_rest_bad_request(
+    transport: str = "rest", request_type=completion_service.CompleteQueryRequest
+):
+    client = CompletionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "data_store": "projects/sample1/locations/sample2/dataStores/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
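+    # A 400 status on the underlying requests.Session is surfaced to the caller as
+    # google.api_core.exceptions.BadRequest, which is what pytest.raises pins.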
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.complete_query(request) + + +def test_complete_query_rest_error(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CompletionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CompletionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + transports.CompletionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CompletionServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CompletionServiceGrpcTransport, + ) + + +def test_completion_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CompletionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_completion_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.completion_service.transports.CompletionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CompletionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "complete_query", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_completion_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.completion_service.transports.CompletionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CompletionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_completion_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.completion_service.transports.CompletionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CompletionServiceTransport() + adc.assert_called_once() + + +def test_completion_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
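+    # ADC here means Application Default Credentials, resolved via
+    # google.auth.default(); the assertion checks the scopes the client asks for.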
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CompletionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + ], +) +def test_completion_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + transports.CompletionServiceRestTransport, + ], +) +def test_completion_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CompletionServiceGrpcTransport, grpc_helpers), + (transports.CompletionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_completion_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + ], +) +def test_completion_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
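+    # Explicit ssl_channel_credentials should win: the channel factory must be
+    # invoked with exactly the mock credentials object, untouched.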
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_completion_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CompletionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_completion_service_host_no_port(transport_name): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_completion_service_host_with_port(transport_name): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_completion_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CompletionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CompletionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.complete_query._session + session2 = client2.transport.complete_query._session + assert session1 != session2 + + +def test_completion_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.CompletionServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_completion_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CompletionServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.CompletionServiceGrpcTransport,
+        transports.CompletionServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_completion_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
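+# The ADC variant below differs from the callback variant above only in where the
+# client certificate comes from: google.auth.transport.grpc.SslCredentials is
+# patched so the cert is sourced from ADC rather than an explicit callback.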
+@pytest.mark.parametrize( + "transport_class", + [ + transports.CompletionServiceGrpcTransport, + transports.CompletionServiceGrpcAsyncIOTransport, + ], +) +def test_completion_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_store_path(): + project = "squid" + location = "clam" + data_store = "whelk" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = CompletionServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = CompletionServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CompletionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CompletionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CompletionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CompletionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CompletionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CompletionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CompletionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = CompletionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CompletionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CompletionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CompletionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CompletionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CompletionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CompletionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CompletionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CompletionServiceClient, transports.CompletionServiceGrpcTransport), + ( + CompletionServiceAsyncClient, + transports.CompletionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index ec1d1e9762e3..4f7d62ae23a0 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -60,7 +60,11 @@ pagers, transports, ) -from google.cloud.discoveryengine_v1beta.types import document_service, import_config +from google.cloud.discoveryengine_v1beta.types import ( + document_service, + import_config, + purge_config, +) from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -2257,6 +2261,150 @@ async def test_import_documents_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeDocumentsRequest, + dict, + ], +) +def test_purge_documents(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
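+ # PurgeDocuments is a long-running operation: the mocked stub returns a raw
+ # operations_pb2.Operation, which the client wraps in a
+ # google.api_core.operation.Operation (a future.Future subclass), hence the
+ # isinstance check against future.Future below.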
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + client.purge_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeDocumentsRequest() + + +@pytest.mark.asyncio +async def test_purge_documents_async( + transport: str = "grpc_asyncio", request_type=purge_config.PurgeDocumentsRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_documents_async_from_dict(): + await test_purge_documents_async(request_type=dict) + + +def test_purge_documents_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
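+ # Each mock_calls entry unpacks as (name, args, kwargs); the routing header
+ # travels in kwargs["metadata"] as the tuple
+ # ("x-goog-request-params", "parent=parent_value").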
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_documents_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_documents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -2913,7 +3061,13 @@ def test_create_document_rest(request_type): "name": "name_value", "id": "id_value", "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, } request = request_type(**request_init) @@ -3138,7 +3292,13 @@ def test_create_document_rest_bad_request( "name": "name_value", "id": "id_value", "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, } request = request_type(**request_init) @@ -3263,7 +3423,13 @@ def test_update_document_rest(request_type): "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, } request = request_type(**request_init) @@ -3461,7 +3627,13 @@ def test_update_document_rest_bad_request( "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, } request = request_type(**request_init) @@ -3955,6 +4127,231 @@ def test_import_documents_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeDocumentsRequest, + dict, + ], +) +def test_purge_documents_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
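+ # The REST tests never touch the network: the session's request() is
+ # patched and returns a fake requests.Response whose body is the Operation
+ # serialized with json_format.MessageToJson.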
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_documents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
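+ # Stubbing transcode() this way pins down exactly which URI, verb, body and
+ # query params reach the session, independent of the method's real
+ # http_options.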
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.purge_documents(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_purge_documents_rest_unset_required_fields():
+ transport = transports.DocumentServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.purge_documents._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(())
+ & set(
+ (
+ "parent",
+ "filter",
+ )
+ )
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_purge_documents_rest_interceptors(null_interceptor):
+ transport = transports.DocumentServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DocumentServiceRestInterceptor(),
+ )
+ client = DocumentServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.DocumentServiceRestInterceptor, "post_purge_documents"
+ ) as post, mock.patch.object(
+ transports.DocumentServiceRestInterceptor, "pre_purge_documents"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = purge_config.PurgeDocumentsRequest.pb(
+ purge_config.PurgeDocumentsRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = json_format.MessageToJson(
+ operations_pb2.Operation()
+ )
+
+ request = purge_config.PurgeDocumentsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+
+ client.purge_documents(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_purge_documents_rest_bad_request(
+ transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest
+):
+ client = DocumentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
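+ # A 400 status on the fake response surfaces as
+ # google.api_core.exceptions.BadRequest, which pytest.raises captures.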
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_documents(request) + + +def test_purge_documents_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DocumentServiceGrpcTransport( @@ -4100,6 +4497,7 @@ def test_document_service_base_transport(): "update_document", "delete_document", "import_documents", + "purge_documents", "get_operation", "list_operations", ) @@ -4400,6 +4798,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.import_documents._session session2 = client2.transport.import_documents._session assert session1 != session2 + session1 = client1.transport.purge_documents._session + session2 = client2.transport.purge_documents._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): @@ -4776,7 +5177,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/operations/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" }, request, ) @@ -4806,7 +5207,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/operations/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4839,7 +5240,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" }, request, ) @@ -4869,7 +5270,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
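The operation-name fixtures above gain a collections/* segment. As a reference for the new shape, a minimal sketch (the helper name and sample identifiers are made up for illustration; the real tests inline the literal paths):

    # Hypothetical helper mirroring the updated resource-path layout.
    def branch_operation_name(
        project: str,
        location: str,
        collection: str,
        data_store: str,
        branch: str,
        operation: str,
    ) -> str:
        return (
            f"projects/{project}/locations/{location}"
            f"/collections/{collection}/dataStores/{data_store}"
            f"/branches/{branch}/operations/{operation}"
        )

    assert branch_operation_name(
        "sample1", "sample2", "sample3", "sample4", "sample5", "sample6"
    ) == (
        "projects/sample1/locations/sample2/collections/sample3"
        "/dataStores/sample4/branches/sample5/operations/sample6"
    )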
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py index fb3717ab0abb..f501949e9a28 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py @@ -1892,7 +1892,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/operations/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" }, request, ) @@ -1922,7 +1922,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/operations/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -1955,7 +1955,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" }, request, ) @@ -1985,7 +1985,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py new file mode 100644 index 000000000000..09bd76f6040f --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py @@ -0,0 +1,4747 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.schema_service import ( + SchemaServiceAsyncClient, + SchemaServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1beta.types import schema +from google.cloud.discoveryengine_v1beta.types import schema as gcd_schema +from google.cloud.discoveryengine_v1beta.types import schema_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
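+# Only a localhost default endpoint is rewritten (to "foo.googleapis.com");
+# a real default endpoint passes through unchanged.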
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SchemaServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), + ], +) +def test_schema_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SchemaServiceGrpcTransport, "grpc"), + (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SchemaServiceRestTransport, "rest"), + ], +) +def test_schema_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), + ], +) +def test_schema_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) 
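+ # from_service_account_json is the legacy alias of from_service_account_file;
+ # both constructors must yield a client carrying the mocked credentials.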
+ assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_schema_service_client_get_transport_class(): + transport = SchemaServiceClient.get_transport_class() + available_transports = [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceRestTransport, + ] + assert transport in available_transports + + transport = SchemaServiceClient.get_transport_class("grpc") + assert transport == transports.SchemaServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +def test_schema_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
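+ # With "always", the client targets DEFAULT_MTLS_ENDPOINT even though no
+ # client certificate is configured.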
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "true"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "false"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "true"), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_schema_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
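+ # With no certificate from either source, the client stays on the regular
+ # endpoint regardless of GOOGLE_API_USE_CLIENT_CERTIFICATE.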
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
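+ # "auto" plus an available default cert flips both the endpoint (to the
+ # mTLS variant) and the returned cert source.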
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), + ], +) +def test_schema_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", None), + ], +) +def test_schema_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
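+ # The path is forwarded to the transport verbatim as credentials_file, while
+ # credentials itself stays None until the transport loads the file.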
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_schema_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.schema_service.transports.SchemaServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SchemaServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_schema_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
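+ # Both google.auth entry points are patched so the test can tell file-loaded
+ # credentials (file_creds) apart from ADC (creds) and assert that
+ # create_channel received the former.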
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.GetSchemaRequest, + dict, + ], +) +def test_get_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema( + name="name_value", + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ), + ) + response = client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.GetSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + + +def test_get_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + client.get_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.GetSchemaRequest() + + +@pytest.mark.asyncio +async def test_get_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.GetSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + ) + ) + response = await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.GetSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_schema_async_from_dict(): + await test_get_schema_async(request_type=dict) + + +def test_get_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.GetSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = schema.Schema() + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.GetSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_schema( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
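+ # The ValueError is raised client-side before any RPC is attempted, so no
+ # stub mocking is needed here.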
+ with pytest.raises(ValueError):
+ client.get_schema(
+ schema_service.GetSchemaRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_schema_flattened_async():
+ client = SchemaServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_schema(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_schema_flattened_error_async():
+ client = SchemaServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_schema(
+ schema_service.GetSchemaRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ schema_service.ListSchemasRequest,
+ dict,
+ ],
+)
+def test_list_schemas(request_type, transport: str = "grpc"):
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = schema_service.ListSchemasResponse(
+ next_page_token="next_page_token_value",
+ )
+ response = client.list_schemas(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == schema_service.ListSchemasRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListSchemasPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_schemas_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + client.list_schemas() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.ListSchemasRequest() + + +@pytest.mark.asyncio +async def test_list_schemas_async( + transport: str = "grpc_asyncio", request_type=schema_service.ListSchemasRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema_service.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.ListSchemasRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_schemas_async_from_dict(): + await test_list_schemas_async(request_type=dict) + + +def test_list_schemas_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.ListSchemasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = schema_service.ListSchemasResponse() + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_schemas_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.ListSchemasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema_service.ListSchemasResponse() + ) + await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
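+    # The routing header mirrors URI path fields into request metadata so the
+    # backend can route on the resource; conceptually (a sketch, not the exact
+    # library internals) the entry amounts to:
+    #     ("x-goog-request-params", "parent=" + request.parent)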
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_schemas_flattened():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = schema_service.ListSchemasResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_schemas(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_schemas_flattened_error():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_schemas(
+            schema_service.ListSchemasRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_schemas_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            schema_service.ListSchemasResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_schemas(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_schemas_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_schemas(
+            schema_service.ListSchemasRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_schemas_pager(transport_name: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Set the response to a series of pages.
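+        # ``side_effect`` hands the mock one staged response per call, so the
+        # pager walks four pages ending in a response with no next_page_token;
+        # the trailing RuntimeError would only surface if it over-fetched.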
+        call.side_effect = (
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+                next_page_token="abc",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[],
+                next_page_token="def",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                ],
+                next_page_token="ghi",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_schemas(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, schema.Schema) for i in results)
+
+
+def test_list_schemas_pages(transport_name: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+                next_page_token="abc",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[],
+                next_page_token="def",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                ],
+                next_page_token="ghi",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_schemas(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_schemas_async_pager():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+                next_page_token="abc",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[],
+                next_page_token="def",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                ],
+                next_page_token="ghi",
+            ),
+            schema_service.ListSchemasResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_schemas(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, schema.Schema) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_schemas_async_pages():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema_service.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_schemas(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.CreateSchemaRequest, + dict, + ], +) +def test_create_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.CreateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + client.create_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.CreateSchemaRequest() + + +@pytest.mark.asyncio +async def test_create_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.CreateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.CreateSchemaRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_schema_async_from_dict(): + await test_create_schema_async(request_type=dict) + + +def test_create_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.CreateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.CreateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_schema( + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].schema
+        mock_val = gcd_schema.Schema(
+            struct_schema=struct_pb2.Struct(
+                fields={
+                    "key_value": struct_pb2.Value(
+                        null_value=struct_pb2.NullValue.NULL_VALUE
+                    )
+                }
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].schema_id
+        mock_val = "schema_id_value"
+        assert arg == mock_val
+
+
+def test_create_schema_flattened_error():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_schema(
+            schema_service.CreateSchemaRequest(),
+            parent="parent_value",
+            schema=gcd_schema.Schema(
+                struct_schema=struct_pb2.Struct(
+                    fields={
+                        "key_value": struct_pb2.Value(
+                            null_value=struct_pb2.NullValue.NULL_VALUE
+                        )
+                    }
+                )
+            ),
+            schema_id="schema_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_schema_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_schema(
+            parent="parent_value",
+            schema=gcd_schema.Schema(
+                struct_schema=struct_pb2.Struct(
+                    fields={
+                        "key_value": struct_pb2.Value(
+                            null_value=struct_pb2.NullValue.NULL_VALUE
+                        )
+                    }
+                )
+            ),
+            schema_id="schema_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].schema
+        mock_val = gcd_schema.Schema(
+            struct_schema=struct_pb2.Struct(
+                fields={
+                    "key_value": struct_pb2.Value(
+                        null_value=struct_pb2.NullValue.NULL_VALUE
+                    )
+                }
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].schema_id
+        mock_val = "schema_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_schema_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_schema(
+            schema_service.CreateSchemaRequest(),
+            parent="parent_value",
+            schema=gcd_schema.Schema(
+                struct_schema=struct_pb2.Struct(
+                    fields={
+                        "key_value": struct_pb2.Value(
+                            null_value=struct_pb2.NullValue.NULL_VALUE
+                        )
+                    }
+                )
+            ),
+            schema_id="schema_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        schema_service.UpdateSchemaRequest,
+        dict,
+    ],
+)
+def test_update_schema(request_type, transport: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.UpdateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + client.update_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.UpdateSchemaRequest() + + +@pytest.mark.asyncio +async def test_update_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.UpdateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.UpdateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_schema_async_from_dict(): + await test_update_schema_async(request_type=dict) + + +def test_update_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.UpdateSchemaRequest() + + request.schema.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "schema.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.UpdateSchemaRequest() + + request.schema.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "schema.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.DeleteSchemaRequest, + dict, + ], +) +def test_delete_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + client.delete_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.DeleteSchemaRequest() + + +@pytest.mark.asyncio +async def test_delete_schema_async( + transport: str = "grpc_asyncio", request_type=schema_service.DeleteSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
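+        # ``FakeUnaryUnaryCall`` (from ``grpc_helpers_async``) wraps a plain
+        # value in an awaitable call object; the async client awaits the stub,
+        # and a bare proto message would not be awaitable.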
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema_service.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_schema_async_from_dict(): + await test_delete_schema_async(request_type=dict) + + +def test_delete_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.DeleteSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema_service.DeleteSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_schema( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_schema(
+            schema_service.DeleteSchemaRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_schema(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_schema(
+            schema_service.DeleteSchemaRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        schema_service.GetSchemaRequest,
+        dict,
+    ],
+)
+def test_get_schema_rest(request_type):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = schema.Schema(
+            name="name_value",
+            struct_schema=struct_pb2.Struct(
+                fields={
+                    "key_value": struct_pb2.Value(
+                        null_value=struct_pb2.NullValue.NULL_VALUE
+                    )
+                }
+            ),
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = schema.Schema.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_schema(request)
+
+    # Establish that the response is the type that we expect.
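+    # Under the REST transport the faked HTTP body above is the JSON encoding
+    # of the proto (via ``json_format.MessageToJson``), which the client is
+    # expected to parse back into a ``schema.Schema`` message.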
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + + +def test_get_schema_rest_required_fields(request_type=schema_service.GetSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
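+            # ``path_template.transcode`` normally maps the request proto onto
+            # the RPC's http rule (URI, verb, query params, body); stubbing it
+            # with a placeholder such as "v1/sample_method" keeps the test
+            # focused on required-field handling rather than path building.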
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = schema.Schema.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_schema(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_schema_rest_unset_required_fields():
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_schema_rest_interceptors(null_interceptor):
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.SchemaServiceRestInterceptor(),
+    )
+    client = SchemaServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "post_get_schema"
+    ) as post, mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "pre_get_schema"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = schema_service.GetSchemaRequest.pb(
+            schema_service.GetSchemaRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = schema.Schema.to_json(schema.Schema())
+
+        request = schema_service.GetSchemaRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = schema.Schema()
+
+        client.get_schema(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_schema_rest_bad_request(
+    transport: str = "rest", request_type=schema_service.GetSchemaRequest
+):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_schema(request)
+
+
+def test_get_schema_rest_flattened():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/schemas/*}" + % client.transport._host, + args[1], + ) + + +def test_get_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_schema( + schema_service.GetSchemaRequest(), + name="name_value", + ) + + +def test_get_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.ListSchemasRequest, + dict, + ], +) +def test_list_schemas_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema_service.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema_service.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_schemas(request) + + # Establish that the response is the type that we expect. 
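+    # List methods return a pager rather than the raw response: the pager
+    # re-issues the request with each ``next_page_token`` as the caller
+    # iterates, so all pages are reachable from a single call site.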
+ assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schemas_rest_required_fields( + request_type=schema_service.ListSchemasRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema_service.ListSchemasResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = schema_service.ListSchemasResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_schemas(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_schemas_rest_unset_required_fields():
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_schemas._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_schemas_rest_interceptors(null_interceptor):
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.SchemaServiceRestInterceptor(),
+    )
+    client = SchemaServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "post_list_schemas"
+    ) as post, mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "pre_list_schemas"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = schema_service.ListSchemasRequest.pb(
+            schema_service.ListSchemasRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = schema_service.ListSchemasResponse.to_json(
+            schema_service.ListSchemasResponse()
+        )
+
+        request = schema_service.ListSchemasRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = schema_service.ListSchemasResponse()
+
+        client.list_schemas(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_schemas_rest_bad_request(
+    transport: str = "rest", request_type=schema_service.ListSchemasRequest
+):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
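+    # A 400 status on the mocked session is expected to surface as
+    # ``core_exceptions.BadRequest``, keeping REST failures inside the same
+    # ``GoogleAPICallError`` hierarchy used by the gRPC transport.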
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_schemas(request) + + +def test_list_schemas_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema_service.ListSchemasResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema_service.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_schemas(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*}/schemas" + % client.transport._host, + args[1], + ) + + +def test_list_schemas_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schemas( + schema_service.ListSchemasRequest(), + parent="parent_value", + ) + + +def test_list_schemas_rest_pager(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema_service.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema_service.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + schema_service.ListSchemasResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + pager = client.list_schemas(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + pages = list(client.list_schemas(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.CreateSchemaRequest, + dict, + ], +) +def test_create_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request_init["schema"] = { + "struct_schema": {"fields": {}}, + "json_schema": "json_schema_value", + "name": "name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_schema(request) + + # Establish that the response is the type that we expect. 
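+    # For a long-running RPC over REST the client returns an operation
+    # future; its ``operation`` attribute exposes the underlying
+    # ``operations_pb2.Operation``, whose name should match the staged value.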
+ assert response.operation.name == "operations/spam" + + +def test_create_schema_rest_required_fields( + request_type=schema_service.CreateSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["schema_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "schemaId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "schemaId" in jsonified_request + assert jsonified_request["schemaId"] == request_init["schema_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["schemaId"] = "schema_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("schema_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "schemaId" in jsonified_request + assert jsonified_request["schemaId"] == "schema_id_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_schema(request)
+
+            expected_params = [
+                (
+                    "schemaId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_schema_rest_unset_required_fields():
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("schemaId",))
+        & set(
+            (
+                "parent",
+                "schema",
+                "schemaId",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_schema_rest_interceptors(null_interceptor):
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.SchemaServiceRestInterceptor(),
+    )
+    client = SchemaServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "post_create_schema"
+    ) as post, mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "pre_create_schema"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = schema_service.CreateSchemaRequest.pb(
+            schema_service.CreateSchemaRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = schema_service.CreateSchemaRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.create_schema(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_schema_rest_bad_request(
+    transport: str = "rest", request_type=schema_service.CreateSchemaRequest
+):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"}
+    request_init["schema"] = {
+        "struct_schema": {"fields": {}},
+        "json_schema": "json_schema_value",
+        "name": "name_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_schema(request) + + +def test_create_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*}/schemas" + % client.transport._host, + args[1], + ) + + +def test_create_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_schema( + schema_service.CreateSchemaRequest(), + parent="parent_value", + schema=gcd_schema.Schema( + struct_schema=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + schema_id="schema_id_value", + ) + + +def test_create_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.UpdateSchemaRequest, + dict, + ], +) +def test_update_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "schema": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + } + request_init["schema"] = { + "struct_schema": {"fields": {}}, + "json_schema": "json_schema_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
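+        # UpdateSchema is long-running, so the immediate response is an
+        # Operation envelope rather than a Schema. A real caller would
+        # typically wait on it, along the lines of (illustrative):
+        #   lro = client.update_schema(request=request)
+        #   schema = lro.result()  # blocks until the operation resolves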
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.update_schema(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == "operations/spam"
+
+
+def test_update_schema_rest_required_fields(
+    request_type=schema_service.UpdateSchemaRequest,
+):
+    transport_class = transports.SchemaServiceRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_schema._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters do not mix.
+    assert not set(unset_fields) - set(("allow_missing",))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_schema(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_schema_rest_unset_required_fields():
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.update_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("allowMissing",)) & set(("schema",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_schema_rest_interceptors(null_interceptor):
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.SchemaServiceRestInterceptor(),
+    )
+    client = SchemaServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "post_update_schema"
+    ) as post, mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "pre_update_schema"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = schema_service.UpdateSchemaRequest.pb(
+            schema_service.UpdateSchemaRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = schema_service.UpdateSchemaRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.update_schema(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_schema_rest_bad_request(
+    transport: str = "rest", request_type=schema_service.UpdateSchemaRequest
+):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "schema": {
+            "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4"
+        }
+    }
+    request_init["schema"] = {
+        "struct_schema": {"fields": {}},
+        "json_schema": "json_schema_value",
+        "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_schema(request) + + +def test_update_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema_service.DeleteSchemaRequest, + dict, + ], +) +def test_delete_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_schema(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_schema_rest_required_fields( + request_type=schema_service.DeleteSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_schema(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_schema_rest_unset_required_fields():
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_schema_rest_interceptors(null_interceptor):
+    transport = transports.SchemaServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.SchemaServiceRestInterceptor(),
+    )
+    client = SchemaServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "post_delete_schema"
+    ) as post, mock.patch.object(
+        transports.SchemaServiceRestInterceptor, "pre_delete_schema"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = schema_service.DeleteSchemaRequest.pb(
+            schema_service.DeleteSchemaRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = schema_service.DeleteSchemaRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.delete_schema(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_schema_rest_bad_request(
+    transport: str = "rest", request_type=schema_service.DeleteSchemaRequest
+):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_schema(request) + + +def test_delete_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/schemas/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/schemas/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_schema( + schema_service.DeleteSchemaRequest(), + name="name_value", + ) + + +def test_delete_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
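+    # An api_key is itself an authentication mechanism, so combining it with
+    # an explicit credentials object is presumably rejected as ambiguous.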
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SchemaServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SchemaServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SchemaServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SchemaServiceGrpcTransport, + ) + + +def test_schema_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_schema_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.schema_service.transports.SchemaServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
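+    # The base transport is an abstract interface; the concrete grpc and rest
+    # transports are expected to override each of the methods listed here.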
+ methods = ( + "get_schema", + "list_schemas", + "create_schema", + "update_schema", + "delete_schema", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_schema_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_schema_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport() + adc.assert_called_once() + + +def test_schema_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SchemaServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
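+    # google.auth.default() implements Application Default Credentials, which
+    # (roughly) consults GOOGLE_APPLICATION_CREDENTIALS, then gcloud user
+    # credentials, then the metadata server; it is mocked out entirely here.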
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_schema_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SchemaServiceGrpcTransport, grpc_helpers), + (transports.SchemaServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_schema_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
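+    # The callback returns a (cert_bytes, key_bytes) pair, which the transport
+    # presumably forwards to grpc.ssl_channel_credentials(), roughly:
+    #   grpc.ssl_channel_credentials(
+    #       certificate_chain=cert_bytes, private_key=key_bytes
+    #   )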
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_schema_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.SchemaServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_schema_service_rest_lro_client():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_schema_service_host_no_port(transport_name):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_schema_service_host_with_port(transport_name):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="discoveryengine.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "discoveryengine.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://discoveryengine.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_schema_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = SchemaServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = SchemaServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_schema._session
+    session2 = client2.transport.get_schema._session
+    assert session1 != session2
+    session1 = client1.transport.list_schemas._session
+    session2 = client2.transport.list_schemas._session
+    assert session1 != session2
+    session1 = client1.transport.create_schema._session
+    session2 = client2.transport.create_schema._session
+    assert session1 != session2
+    session1 = client1.transport.update_schema._session
+    session2 = client2.transport.update_schema._session
+    assert session1 != session2
+    session1 = client1.transport.delete_schema._session
+    session2 = client2.transport.delete_schema._session
+    assert session1 != session2
+
+
+def test_schema_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SchemaServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_schema_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SchemaServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SchemaServiceGrpcTransport,
+        transports.SchemaServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_schema_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
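+# For context, the deprecated calling convention exercised below looks roughly
+# like this (illustrative values; the mtls endpoint name is an assumption):
+#
+#   transport = transports.SchemaServiceGrpcTransport(
+#       host="discoveryengine.googleapis.com",
+#       api_mtls_endpoint="discoveryengine.mtls.googleapis.com",
+#       client_cert_source=client_cert_source_callback,
+#   )
+#
+# ssl_channel_credentials / client_cert_source_for_mtls are the replacements.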
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SchemaServiceGrpcTransport,
+        transports.SchemaServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_schema_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_schema_service_grpc_lro_client():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_schema_service_grpc_lro_async_client():
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_store_path():
+    project = "squid"
+    location = "clam"
+    data_store = "whelk"
+    expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+        project=project,
+        location=location,
+        data_store=data_store,
+    )
+    actual = SchemaServiceClient.data_store_path(project, location, data_store)
+    assert expected == actual
+
+
+def test_parse_data_store_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "data_store": "nudibranch",
+    }
+    path = SchemaServiceClient.data_store_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = SchemaServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_schema_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + schema = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/schemas/{schema}".format( + project=project, + location=location, + data_store=data_store, + schema=schema, + ) + actual = SchemaServiceClient.schema_path(project, location, data_store, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "schema": "clam", + } + path = SchemaServiceClient.schema_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_schema_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SchemaServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SchemaServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SchemaServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SchemaServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SchemaServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SchemaServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SchemaServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SchemaServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SchemaServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SchemaServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
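+    # e.g. "projects/squid/locations/clam" should parse back into
+    # {"project": "squid", "location": "clam"}.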
+ actual = SchemaServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SchemaServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
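+    # The async surface must be awaited, so the mock wraps the response in
+    # FakeUnaryUnaryCall to produce an awaitable instead of a bare message.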
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
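+    # Routing headers travel as a URL-encoded "field=value" string, so
+    # name="locations" should surface as ("x-goog-request-params",
+    # "name=locations") in the call metadata, as asserted below.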
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py new file mode 100644 index 000000000000..daea82e80293 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py @@ -0,0 +1,2585 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.search_service import ( + SearchServiceAsyncClient, + SearchServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1beta.types import common, search_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SearchServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SearchServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchServiceClient, "grpc"), + (SearchServiceAsyncClient, "grpc_asyncio"), + (SearchServiceClient, "rest"), + ], +) +def test_search_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.SearchServiceGrpcTransport, "grpc"), + (transports.SearchServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SearchServiceRestTransport, "rest"), + ], +) +def test_search_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchServiceClient, "grpc"), + (SearchServiceAsyncClient, "grpc_asyncio"), + (SearchServiceClient, "rest"), + ], +) +def test_search_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_search_service_client_get_transport_class(): + transport = SearchServiceClient.get_transport_class() + available_transports = [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceRestTransport, + ] + assert transport in available_transports + + transport = SearchServiceClient.get_transport_class("grpc") + assert transport == transports.SearchServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc"), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SearchServiceClient, transports.SearchServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SearchServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceClient), +) +@mock.patch.object( + SearchServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceAsyncClient), +) +def test_search_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SearchServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SearchServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
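+ # An explicit api_endpoint in ClientOptions is forwarded to the transport
+ # verbatim and takes precedence over DEFAULT_ENDPOINT ("squid.clam.whelk"
+ # is just a placeholder host, like the other mollusc names in this file).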
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
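+ # Only "true" and "false" are valid values for this variable; any other
+ # value should surface as a ValueError at client construction time.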
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class(transport=transport_name)
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+ # Check the case api_audience is provided
+ options = client_options.ClientOptions(
+ api_audience="https://language.googleapis.com"
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience="https://language.googleapis.com",
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc", "true"),
+ (
+ SearchServiceAsyncClient,
+ transports.SearchServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc", "false"),
+ (
+ SearchServiceAsyncClient,
+ transports.SearchServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ (SearchServiceClient, transports.SearchServiceRestTransport, "rest", "true"),
+ (SearchServiceClient, transports.SearchServiceRestTransport, "rest", "false"),
+ ],
+)
+@mock.patch.object(
+ SearchServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(SearchServiceClient),
+)
+@mock.patch.object(
+ SearchServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(SearchServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_search_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
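+ # Expected outcome: the cert source is honored (and the endpoint switched to
+ # DEFAULT_MTLS_ENDPOINT) only when the variable is "true"; with "false" the
+ # default endpoint is kept and no cert source is forwarded to the transport.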
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SearchServiceClient, SearchServiceAsyncClient] +) +@mock.patch.object( + SearchServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceClient), +) +@mock.patch.object( + SearchServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchServiceAsyncClient), +) +def test_search_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
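+ # The method returns an (api_endpoint, cert_source) pair; an explicit
+ # api_endpoint in the options wins regardless of the environment settings.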
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SearchServiceClient, transports.SearchServiceGrpcTransport, "grpc"), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SearchServiceClient, transports.SearchServiceRestTransport, "rest"), + ], +) +def test_search_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchServiceClient, + transports.SearchServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SearchServiceClient, transports.SearchServiceRestTransport, "rest", None), + ], +) +def test_search_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_search_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.search_service.transports.SearchServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SearchServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchServiceClient, + transports.SearchServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchServiceAsyncClient, + transports.SearchServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_search_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
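+ # load_credentials_from_file is patched, so "credentials.json" never needs
+ # to exist on disk; what matters is that the file-based credentials, not
+ # the ADC ones, are the credentials handed to create_channel below.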
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_service.SearchRequest, + dict, + ], +) +def test_search(request_type, transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = search_service.SearchResponse( + total_size=1086, + attribution_token="attribution_token_value", + next_page_token="next_page_token_value", + corrected_query="corrected_query_value", + applied_controls=["applied_controls_value"], + ) + response = client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == search_service.SearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchPager) + assert response.total_size == 1086 + assert response.attribution_token == "attribution_token_value" + assert response.next_page_token == "next_page_token_value" + assert response.corrected_query == "corrected_query_value" + assert response.applied_controls == ["applied_controls_value"] + + +def test_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + client.search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_service.SearchRequest() + + +@pytest.mark.asyncio +async def test_search_async( + transport: str = "grpc_asyncio", request_type=search_service.SearchRequest +): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.search), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_service.SearchResponse( + total_size=1086, + attribution_token="attribution_token_value", + next_page_token="next_page_token_value", + corrected_query="corrected_query_value", + applied_controls=["applied_controls_value"], + ) + ) + response = await client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == search_service.SearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAsyncPager) + assert response.total_size == 1086 + assert response.attribution_token == "attribution_token_value" + assert response.next_page_token == "next_page_token_value" + assert response.corrected_query == "corrected_query_value" + assert response.applied_controls == ["applied_controls_value"] + + +@pytest.mark.asyncio +async def test_search_async_from_dict(): + await test_search_async(request_type=dict) + + +def test_search_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_service.SearchRequest() + + request.serving_config = "serving_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + call.return_value = search_service.SearchResponse() + client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config=serving_config_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_service.SearchRequest() + + request.serving_config = "serving_config_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_service.SearchResponse() + ) + await client.search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config=serving_config_value", + ) in kw["metadata"] + + +def test_search_pager(transport_name: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + # Set the response to a series of pages. 
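+ # Each element of side_effect answers one page fetch, giving
+ # 3 + 0 + 1 + 2 = 6 results across four pages; the trailing RuntimeError
+ # fails the test if the pager ever requests a fifth page.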
+ call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("serving_config", ""),)), + ) + pager = client.search(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, search_service.SearchResponse.SearchResult) for i in results + ) + + +def test_search_pages(transport_name: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_async_pager(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, search_service.SearchResponse.SearchResult) for i in responses + ) + + +@pytest.mark.asyncio +async def test_search_async_pages(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.search(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + search_service.SearchRequest, + dict, + ], +) +def test_search_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = search_service.SearchResponse( + total_size=1086, + attribution_token="attribution_token_value", + next_page_token="next_page_token_value", + corrected_query="corrected_query_value", + applied_controls=["applied_controls_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = search_service.SearchResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchPager) + assert response.total_size == 1086 + assert response.attribution_token == "attribution_token_value" + assert response.next_page_token == "next_page_token_value" + assert response.corrected_query == "corrected_query_value" + assert response.applied_controls == ["applied_controls_value"] + + +def test_search_rest_required_fields(request_type=search_service.SearchRequest): + transport_class = transports.SearchServiceRestTransport + + request_init = {} + request_init["serving_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["servingConfig"] = "serving_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "servingConfig" in jsonified_request + assert jsonified_request["servingConfig"] == "serving_config_value" + + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = search_service.SearchResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
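+ # The canned dict below (uri/method/query_params/body) stands in for the
+ # real transcoding output, with "v1/sample_method" as a placeholder URI.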
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = search_service.SearchResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_rest_unset_required_fields(): + transport = transports.SearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("servingConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_rest_interceptors(null_interceptor): + transport = transports.SearchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchServiceRestInterceptor(), + ) + client = SearchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SearchServiceRestInterceptor, "post_search" + ) as post, mock.patch.object( + transports.SearchServiceRestInterceptor, "pre_search" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_service.SearchRequest.pb(search_service.SearchRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = search_service.SearchResponse.to_json( + search_service.SearchResponse() + ) + + request = search_service.SearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = search_service.SearchResponse() + + client.search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_rest_bad_request( + transport: str = "rest", request_type=search_service.SearchRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search(request) + + +def test_search_rest_pager(transport: str = "rest"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + next_page_token="abc", + ), + search_service.SearchResponse( + results=[], + next_page_token="def", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + ], + next_page_token="ghi", + ), + search_service.SearchResponse( + results=[ + search_service.SearchResponse.SearchResult(), + search_service.SearchResponse.SearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(search_service.SearchResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "serving_config": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + + pager = client.search(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, search_service.SearchResponse.SearchResult) for i in results + ) + + pages = list(client.search(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SearchServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SearchServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + transports.SearchServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SearchServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SearchServiceGrpcTransport, + ) + + +def test_search_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SearchServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_search_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.search_service.transports.SearchServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SearchServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "search", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_search_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.search_service.transports.SearchServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_search_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.search_service.transports.SearchServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchServiceTransport() + adc.assert_called_once() + + +def test_search_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SearchServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + ], +) +def test_search_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + transports.SearchServiceRestTransport, + ], +) +def test_search_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SearchServiceGrpcTransport, grpc_helpers), + (transports.SearchServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_search_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + ], +) +def test_search_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
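+ # In that fallback the transport itself calls grpc.ssl_channel_credentials()
+ # with the cert/key pair produced by the callback, as asserted below.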
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
+def test_search_service_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transports.SearchServiceRestTransport(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_search_service_host_no_port(transport_name):
+ client = SearchServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="discoveryengine.googleapis.com"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "discoveryengine.googleapis.com:443"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://discoveryengine.googleapis.com"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_search_service_host_with_port(transport_name):
+ client = SearchServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="discoveryengine.googleapis.com:8000"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "discoveryengine.googleapis.com:8000"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://discoveryengine.googleapis.com:8000"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "rest",
+ ],
+)
+def test_search_service_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = SearchServiceClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = SearchServiceClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.search._session
+ session2 = client2.transport.search._session
+ assert session1 != session2
+
+
+def test_search_service_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.SearchServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_search_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.SearchServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.SearchServiceGrpcTransport,
+ transports.SearchServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_search_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
+):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchServiceGrpcTransport, + transports.SearchServiceGrpcAsyncIOTransport, + ], +) +def test_search_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_branch_path(): + project = "squid" + location = "clam" + data_store = "whelk" + branch = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}".format( + project=project, + location=location, + data_store=data_store, + branch=branch, + ) + actual = SearchServiceClient.branch_path(project, location, data_store, branch) + assert expected == actual + + +def test_parse_branch_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "branch": "mussel", + } + path = SearchServiceClient.branch_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_branch_path(path) + assert expected == actual + + +def test_document_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + branch = "abalone" + document = "squid" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/branches/{branch}/documents/{document}".format( + project=project, + location=location, + data_store=data_store, + branch=branch, + document=document, + ) + actual = SearchServiceClient.document_path( + project, location, data_store, branch, document + ) + assert expected == actual + + +def test_parse_document_path(): + expected = { + "project": "clam", + "location": "whelk", + "data_store": "octopus", + "branch": "oyster", + "document": "nudibranch", + } + path = SearchServiceClient.document_path(**expected) + + # Check that the path construction is reversible. 
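+    # parse_document_path() matches the template with a regex and returns the
+    # captured segments as a dict (an empty dict if the path does not match).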
+ actual = SearchServiceClient.parse_document_path(path) + assert expected == actual + + +def test_serving_config_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + serving_config = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format( + project=project, + location=location, + data_store=data_store, + serving_config=serving_config, + ) + actual = SearchServiceClient.serving_config_path( + project, location, data_store, serving_config + ) + assert expected == actual + + +def test_parse_serving_config_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "serving_config": "clam", + } + path = SearchServiceClient.serving_config_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_serving_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SearchServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SearchServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SearchServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SearchServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SearchServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SearchServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SearchServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SearchServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SearchServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SearchServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SearchServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SearchServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SearchServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SearchServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
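+        # The REST transport deserializes the JSON payload back into an
+        # operations_pb2.Operation, so this round-trips through json_format.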
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
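+        # The request may be given as a plain dict; the client coerces it into a
+        # GetOperationRequest before invoking the transport.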
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
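+    # The routing parameters travel as "x-goog-request-params" metadata rather
+    # than being written onto the request message itself.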
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SearchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SearchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SearchServiceClient, transports.SearchServiceGrpcTransport), + (SearchServiceAsyncClient, transports.SearchServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index 06c401f7bcee..4c65f73d8d0d 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -2862,7 +2862,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/operations/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" }, request, ) @@ -2892,7 +2892,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/operations/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2925,7 +2925,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" }, request, ) @@ -2955,7 +2955,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-domains/.OwlBot.yaml b/packages/google-cloud-domains/.OwlBot.yaml new file mode 100644 index 000000000000..fa7d2b9e017a --- /dev/null +++ b/packages/google-cloud-domains/.OwlBot.yaml @@ -0,0 +1,27 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-preserve-regex: + - /owl-bot-staging/google-cloud-domains/v1alpha2 + +deep-copy-regex: + - source: /google/cloud/domains/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-domains/$1 + +begin-after-commit-hash: 6a5da3f1274b088752f074da5bc9e30bd1beb27e + diff --git a/packages/google-cloud-domains/.coveragerc b/packages/google-cloud-domains/.coveragerc new file mode 100644 index 000000000000..4cecf7e1137b --- /dev/null +++ b/packages/google-cloud-domains/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/domains/__init__.py + google/cloud/domains/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-domains/.flake8 b/packages/google-cloud-domains/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-domains/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-domains/.gitignore b/packages/google-cloud-domains/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-domains/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-domains/.repo-metadata.json b/packages/google-cloud-domains/.repo-metadata.json new file mode 100644 index 000000000000..7ad7d312ce5b --- /dev/null +++ b/packages/google-cloud-domains/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "domains", + "name_pretty": "Cloud Domains", + "product_documentation": "https://cloud.google.com/domains", + "client_documentation": "https://cloud.google.com/python/docs/reference/domains/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-domains", + "api_id": "domains.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "domains", + "api_description": "allows you to register and manage domains by using Cloud Domains." +} diff --git a/packages/google-cloud-domains/CHANGELOG.md b/packages/google-cloud-domains/CHANGELOG.md new file mode 100644 index 000000000000..edd72d24c2d1 --- /dev/null +++ b/packages/google-cloud-domains/CHANGELOG.md @@ -0,0 +1,230 @@ +# Changelog + +## [1.5.1](https://github.com/googleapis/python-domains/compare/v1.5.0...v1.5.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#201](https://github.com/googleapis/python-domains/issues/201)) ([3054d9f](https://github.com/googleapis/python-domains/commit/3054d9fa04b0d928e125a076547f048f9fabdfdc)) + +## [1.5.0](https://github.com/googleapis/python-domains/compare/v1.4.1...v1.5.0) (2023-02-19) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#195](https://github.com/googleapis/python-domains/issues/195)) ([69b69a0](https://github.com/googleapis/python-domains/commit/69b69a04ef3285d8e1812edc993cdaa4db55eb08)) + +## [1.4.1](https://github.com/googleapis/python-domains/compare/v1.4.0...v1.4.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([16f4c42](https://github.com/googleapis/python-domains/commit/16f4c420a15eac5215d559ad054ce3817d7f7819)) + + +### Documentation + +* Add documentation for enums ([16f4c42](https://github.com/googleapis/python-domains/commit/16f4c420a15eac5215d559ad054ce3817d7f7819)) + +## [1.4.0](https://github.com/googleapis/python-domains/compare/v1.3.0...v1.4.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#187](https://github.com/googleapis/python-domains/issues/187)) ([2369d69](https://github.com/googleapis/python-domains/commit/2369d6912f008382fdeaccf2688ca988334b3b9c)) + +## [1.3.0](https://github.com/googleapis/python-domains/compare/v1.2.3...v1.3.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.domains.__version__` ([91b157d](https://github.com/googleapis/python-domains/commit/91b157d47f587d28f0617bb21dc6299ec2aa581f)) +* Add typing to proto.Message based class attributes ([91b157d](https://github.com/googleapis/python-domains/commit/91b157d47f587d28f0617bb21dc6299ec2aa581f)) + + +### Bug Fixes + +* Add dict typing for client_options ([91b157d](https://github.com/googleapis/python-domains/commit/91b157d47f587d28f0617bb21dc6299ec2aa581f)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([dd3c0d7](https://github.com/googleapis/python-domains/commit/dd3c0d73a0ff0bccab0f0fd4768ff672ea2a438b)) +* Drop usage of pkg_resources ([dd3c0d7](https://github.com/googleapis/python-domains/commit/dd3c0d73a0ff0bccab0f0fd4768ff672ea2a438b)) +* Fix timeout default values 
([dd3c0d7](https://github.com/googleapis/python-domains/commit/dd3c0d73a0ff0bccab0f0fd4768ff672ea2a438b)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([91b157d](https://github.com/googleapis/python-domains/commit/91b157d47f587d28f0617bb21dc6299ec2aa581f)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([dd3c0d7](https://github.com/googleapis/python-domains/commit/dd3c0d73a0ff0bccab0f0fd4768ff672ea2a438b)) + +## [1.2.3](https://github.com/googleapis/python-domains/compare/v1.2.2...v1.2.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#177](https://github.com/googleapis/python-domains/issues/177)) ([8ef1ff7](https://github.com/googleapis/python-domains/commit/8ef1ff7d5ddd20cd4e488c4421c5beaeb91c9703)) + +## [1.2.2](https://github.com/googleapis/python-domains/compare/v1.2.1...v1.2.2) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#175](https://github.com/googleapis/python-domains/issues/175)) ([b96d9f9](https://github.com/googleapis/python-domains/commit/b96d9f944515029de19c240919b94517746a6c05)) + +## [1.2.1](https://github.com/googleapis/python-domains/compare/v1.2.0...v1.2.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#162](https://github.com/googleapis/python-domains/issues/162)) ([38cb2a7](https://github.com/googleapis/python-domains/commit/38cb2a77e5e230daad9dd93619bf8be02859bc39)) +* **deps:** require proto-plus >= 1.22.0 ([38cb2a7](https://github.com/googleapis/python-domains/commit/38cb2a77e5e230daad9dd93619bf8be02859bc39)) + +## [1.2.0](https://github.com/googleapis/python-domains/compare/v1.1.3...v1.2.0) (2022-07-14) + + +### Features + +* add audience parameter ([1f81100](https://github.com/googleapis/python-domains/commit/1f811006bf8610a3b99bc42db360f40749e4f9f5)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#153](https://github.com/googleapis/python-domains/issues/153)) ([1f81100](https://github.com/googleapis/python-domains/commit/1f811006bf8610a3b99bc42db360f40749e4f9f5)) +* require python 3.7+ ([#155](https://github.com/googleapis/python-domains/issues/155)) ([c072c18](https://github.com/googleapis/python-domains/commit/c072c181d93eb12dd5ede96030dd27c21d597da8)) + +## [1.1.3](https://github.com/googleapis/python-domains/compare/v1.1.2...v1.1.3) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#144](https://github.com/googleapis/python-domains/issues/144)) ([672ff97](https://github.com/googleapis/python-domains/commit/672ff979ff60527eeda83597bccf77201200119c)) + + +### Documentation + +* fix changelog header to consistent size ([#143](https://github.com/googleapis/python-domains/issues/143)) ([00a732d](https://github.com/googleapis/python-domains/commit/00a732d48739196fd891053725a73e3dd6eef570)) + +## [1.1.2](https://github.com/googleapis/python-domains/compare/v1.1.1...v1.1.2) (2022-05-05) + + +### Documentation + +* fix type in docstring for map fields ([b12a2cd](https://github.com/googleapis/python-domains/commit/b12a2cdfb0906232f035bafbabedfecb0d24a815)) + +## [1.1.1](https://github.com/googleapis/python-domains/compare/v1.1.0...v1.1.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#117](https://github.com/googleapis/python-domains/issues/117)) ([e42d933](https://github.com/googleapis/python-domains/commit/e42d933fed8433e44fadf4bba410d65d1e21b491)) + +## 
[1.1.0](https://github.com/googleapis/python-domains/compare/v1.0.0...v1.1.0) (2022-02-11) + + +### Features + +* add api key support ([#103](https://github.com/googleapis/python-domains/issues/103)) ([2494a6f](https://github.com/googleapis/python-domains/commit/2494a6f379c911ecdddf1298abfb5ad7863906f1)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([b0b1620](https://github.com/googleapis/python-domains/commit/b0b1620f6b464688cde4a4586c8b4e939d34839a)) + +## [1.0.0](https://www.github.com/googleapis/python-domains/compare/v0.4.1...v1.0.0) (2021-12-03) + + +### Features + +* bump release level to production/stable ([#82](https://www.github.com/googleapis/python-domains/issues/82)) ([24606e7](https://www.github.com/googleapis/python-domains/commit/24606e7dd3a90509e702d25edea9b0256420e3ae)) + +## [0.4.1](https://www.github.com/googleapis/python-domains/compare/v0.4.0...v0.4.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([2375457](https://www.github.com/googleapis/python-domains/commit/2375457fc8772914f8689bdcfd132c15fb504d84)) +* **deps:** require google-api-core >= 1.28.0 ([2375457](https://www.github.com/googleapis/python-domains/commit/2375457fc8772914f8689bdcfd132c15fb504d84)) + + +### Documentation + +* list oneofs in docstring ([2375457](https://www.github.com/googleapis/python-domains/commit/2375457fc8772914f8689bdcfd132c15fb504d84)) + +## [0.4.0](https://www.github.com/googleapis/python-domains/compare/v0.3.0...v0.4.0) (2021-10-21) + + +### Features + +* add support for python 3.10 ([#74](https://www.github.com/googleapis/python-domains/issues/74)) ([320b3a0](https://www.github.com/googleapis/python-domains/commit/320b3a0ecc2ebb8ff1d1414a18d5d9f39dda1ae3)) +* add v1 API, plus v1b1 methods for domain transfers ([#77](https://www.github.com/googleapis/python-domains/issues/77)) ([47434a1](https://www.github.com/googleapis/python-domains/commit/47434a15ae6205681209e668ddc358d325ac5f24)) +* set v1 as the default import ([#79](https://www.github.com/googleapis/python-domains/issues/79)) ([4e2691e](https://www.github.com/googleapis/python-domains/commit/4e2691ee781d59c73fd9cb97b39dd59caac34329)) + + +### Bug Fixes + +* **deps:** require proto-plus 1.15.0 ([#81](https://www.github.com/googleapis/python-domains/issues/81)) ([1c72855](https://www.github.com/googleapis/python-domains/commit/1c72855510a51870342e2c3f039571283b7a4534)) + +## [0.3.0](https://www.github.com/googleapis/python-domains/compare/v0.2.3...v0.3.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#71](https://www.github.com/googleapis/python-domains/issues/71)) ([9b49d70](https://www.github.com/googleapis/python-domains/commit/9b49d7047d5a71899670d87dd522f8a83566e627)) + +## [0.2.3](https://www.github.com/googleapis/python-domains/compare/v0.2.2...v0.2.3) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([17d4bed](https://www.github.com/googleapis/python-domains/commit/17d4bed929328cfad16595e0c27d8cf67456f633)) + +## [0.2.2](https://www.github.com/googleapis/python-domains/compare/v0.2.1...v0.2.2) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([fa12c9b](https://www.github.com/googleapis/python-domains/commit/fa12c9b4f77bf43a41df5bb84e3a12c7c2b5a48f)) + +## [0.2.1](https://www.github.com/googleapis/python-domains/compare/v0.2.0...v0.2.1) (2021-07-28) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions 
([#42](https://www.github.com/googleapis/python-domains/issues/42)) ([8c7a8cc](https://www.github.com/googleapis/python-domains/commit/8c7a8cc2923e6bf2cec6d6447ade420632d3c95a)) +* enable self signed jwt for grpc ([#47](https://www.github.com/googleapis/python-domains/issues/47)) ([d4b8730](https://www.github.com/googleapis/python-domains/commit/d4b873068ca3d0f7fadc01beee2ddfcd4f4b381a)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#43](https://www.github.com/googleapis/python-domains/issues/43)) ([2718d3b](https://www.github.com/googleapis/python-domains/commit/2718d3bbe90b019ed21437f16eafd036752beaf3)) + + +### Miscellaneous Chores + +* release as 0.2.1 ([#48](https://www.github.com/googleapis/python-domains/issues/48)) ([3567065](https://www.github.com/googleapis/python-domains/commit/35670650e12cfaa7f55156153db89bb421998688)) + +## [0.2.0](https://www.github.com/googleapis/python-domains/compare/v0.1.0...v0.2.0) (2021-07-01) + + +### Features + +* add always_use_jwt_access ([#36](https://www.github.com/googleapis/python-domains/issues/36)) ([a7d1670](https://www.github.com/googleapis/python-domains/commit/a7d16704d5682c3fb17c7f0354a688871b1ba298)) +* support self-signed JWT flow for service accounts ([4b24611](https://www.github.com/googleapis/python-domains/commit/4b246112d770cd4d4409b8a84a72f13713a59881)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([4b24611](https://www.github.com/googleapis/python-domains/commit/4b246112d770cd4d4409b8a84a72f13713a59881)) +* **deps:** add packaging requirement ([#31](https://www.github.com/googleapis/python-domains/issues/31)) ([942b7da](https://www.github.com/googleapis/python-domains/commit/942b7dadaac43081a937eb993725d670df7519e4)) +* disable always_use_jwt_access ([#39](https://www.github.com/googleapis/python-domains/issues/39)) ([7830b84](https://www.github.com/googleapis/python-domains/commit/7830b846538d3331f76cc7ca41f80b3c6f13ae45)) +* exclude docs and tests from package ([#30](https://www.github.com/googleapis/python-domains/issues/30)) ([20ebc47](https://www.github.com/googleapis/python-domains/commit/20ebc4790a7ed3c0013b6ce2fa0baea760ac6b51)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-domains/issues/1127)) ([#33](https://www.github.com/googleapis/python-domains/issues/33)) ([5b9e3d5](https://www.github.com/googleapis/python-domains/commit/5b9e3d5bacf94fda61f06a35125f80683f3ac7d7)), closes [#1126](https://www.github.com/googleapis/python-domains/issues/1126) + +## 0.1.0 (2021-02-01) + + +### Features + +* generate v1beta1 ([dfa1750](https://www.github.com/googleapis/python-domains/commit/dfa1750c955be72fdae1ae209ce37929e7558626)) diff --git a/packages/google-cloud-domains/CODE_OF_CONDUCT.md b/packages/google-cloud-domains/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-domains/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/packages/google-cloud-domains/CONTRIBUTING.rst b/packages/google-cloud-domains/CONTRIBUTING.rst
new file mode 100644
index 000000000000..c51a64976656
--- /dev/null
+++ b/packages/google-cloud-domains/CONTRIBUTING.rst
@@ -0,0 +1,269 @@
+.. Generated by synthtool. DO NOT EDIT!
+############
+Contributing
+############
+
+#. **Please sign one of the contributor license agreements below.**
+#. Fork the repo, develop and test your code changes, add docs.
+#. Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+  documentation.
+
+- The feature must work fully on the following CPython versions:
+  3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name_of_test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-<python_version> -- -k <name_of_system_test>
+
+
+  .. note::
+
+      System tests are only configured to run under Python.
+      For expediency, we do not run them in older versions of Python 3.
+
+   This alone will not run the tests. You'll need to change some local
+   auth settings and change some configuration in your project to
+   run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_environments>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the ``samples/`` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own ``noxfile.py`` script
+which automates testing. If you decide to create a new folder, you can
+base it on the ``samples/snippets`` folder (providing ``noxfile.py`` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name_of_test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-domains
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-domains/LICENSE b/packages/google-cloud-domains/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-cloud-domains/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
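
The per-folder ``noxfile.py`` described in the contributing section above
typically reduces to a single nox session that installs the sample's
requirements and forwards extra command-line arguments to pytest. The sketch
below is illustrative only: the session name, the pinned Python version, and
the requirements file names are assumptions, and the canonical template is
the one shipped in ``samples/snippets``.

.. code-block:: python

    # noxfile.py -- minimal sketch of a per-samples-folder test runner.
    # Assumes requirements.txt and requirements-test.txt sit in the same folder.
    import nox


    @nox.session(python="3.8")  # exposed to the CLI as `nox -s py-3.8`
    def py(session: nox.Session) -> None:
        """Install the sample's dependencies and run its pytest suite."""
        session.install("-r", "requirements.txt")
        session.install("-r", "requirements-test.txt")
        # Arguments after `--` (e.g. `-- -k <name of test>`) go straight
        # through to pytest.
        session.run("pytest", *session.posargs)

With a session like this in place, ``nox -s py-3.8`` behaves like the
commands quoted in the contributing section above.
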
diff --git a/packages/google-cloud-domains/MANIFEST.in b/packages/google-cloud-domains/MANIFEST.in
new file mode 100644
index 000000000000..e783f4c6209b
--- /dev/null
+++ b/packages/google-cloud-domains/MANIFEST.in
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-cloud-domains/README.rst b/packages/google-cloud-domains/README.rst
new file mode 100644
index 000000000000..1f4d9bd8c793
--- /dev/null
+++ b/packages/google-cloud-domains/README.rst
@@ -0,0 +1,107 @@
+Python Client for Cloud Domains
+===============================
+
+|stable| |pypi| |versions|
+
+`Cloud Domains`_: allows you to register and manage domain names.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+    :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-domains.svg
+    :target: https://pypi.org/project/google-cloud-domains/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-domains.svg
+    :target: https://pypi.org/project/google-cloud-domains/
+.. _Cloud Domains: https://cloud.google.com/domains
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/domains/latest
+.. _Product Documentation: https://cloud.google.com/domains
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Cloud Domains API.`_
+4. `Set up Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Cloud Domains API.: https://cloud.google.com/domains
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-domains
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-domains
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Cloud Domains
+  to see other available methods on the client.
+- Read the `Cloud Domains Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Cloud Domains Product documentation: https://cloud.google.com/domains
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-domains/SECURITY.md b/packages/google-cloud-domains/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-domains/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/packages/google-cloud-domains/docs/.github/CODEOWNERS b/packages/google-cloud-domains/docs/.github/CODEOWNERS
new file mode 100644
index 000000000000..035a5a3dfea3
--- /dev/null
+++ b/packages/google-cloud-domains/docs/.github/CODEOWNERS
@@ -0,0 +1,11 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+#
+# For syntax help see:
+# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+
+# The @googleapis/yoshi-python is the default owner for changes in this repo
+* @googleapis/yoshi-python
+
+
+/samples/ @googleapis/python-samples-owners
diff --git a/packages/google-cloud-domains/docs/CHANGELOG.md b/packages/google-cloud-domains/docs/CHANGELOG.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/packages/google-cloud-domains/docs/CHANGELOG.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-cloud-domains/docs/README.rst b/packages/google-cloud-domains/docs/README.rst
new file mode 120000
index 000000000000..89a0106941ff
--- /dev/null
+++ b/packages/google-cloud-domains/docs/README.rst
@@ -0,0 +1 @@
+../README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-domains/docs/_static/custom.css b/packages/google-cloud-domains/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/packages/google-cloud-domains/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/packages/google-cloud-domains/docs/_templates/layout.html b/packages/google-cloud-domains/docs/_templates/layout.html
new file mode 100644
index 000000000000..6316a537f72b
--- /dev/null
+++ b/packages/google-cloud-domains/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-domains/docs/conf.py b/packages/google-cloud-domains/docs/conf.py new file mode 100644 index 000000000000..9110ac761229 --- /dev/null +++ b/packages/google-cloud-domains/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-domains documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-domains" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-domains", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-domains-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-domains.tex", + "google-cloud-domains Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. 
+# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-domains", + "google-cloud-domains Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-domains", + "google-cloud-domains Documentation", + author, + "google-cloud-domains", + "google-cloud-domains Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-domains/docs/domains_v1/domains.rst b/packages/google-cloud-domains/docs/domains_v1/domains.rst new file mode 100644 index 000000000000..bfb140d20b36 --- /dev/null +++ b/packages/google-cloud-domains/docs/domains_v1/domains.rst @@ -0,0 +1,10 @@ +Domains +------------------------- + +.. automodule:: google.cloud.domains_v1.services.domains + :members: + :inherited-members: + +.. automodule:: google.cloud.domains_v1.services.domains.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-domains/docs/domains_v1/services.rst b/packages/google-cloud-domains/docs/domains_v1/services.rst new file mode 100644 index 000000000000..1073e863174d --- /dev/null +++ b/packages/google-cloud-domains/docs/domains_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Domains v1 API +======================================== +.. toctree:: + :maxdepth: 2 + + domains diff --git a/packages/google-cloud-domains/docs/domains_v1/types.rst b/packages/google-cloud-domains/docs/domains_v1/types.rst new file mode 100644 index 000000000000..f2fefd01dbb1 --- /dev/null +++ b/packages/google-cloud-domains/docs/domains_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Domains v1 API +===================================== + +.. 
automodule:: google.cloud.domains_v1.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-domains/docs/domains_v1beta1/domains.rst b/packages/google-cloud-domains/docs/domains_v1beta1/domains.rst
new file mode 100644
index 000000000000..13d490aa39ad
--- /dev/null
+++ b/packages/google-cloud-domains/docs/domains_v1beta1/domains.rst
@@ -0,0 +1,10 @@
+Domains
+-------------------------
+
+.. automodule:: google.cloud.domains_v1beta1.services.domains
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.domains_v1beta1.services.domains.pagers
+    :members:
+    :inherited-members:
diff --git a/packages/google-cloud-domains/docs/domains_v1beta1/services.rst b/packages/google-cloud-domains/docs/domains_v1beta1/services.rst
new file mode 100644
index 000000000000..3e3faa819d36
--- /dev/null
+++ b/packages/google-cloud-domains/docs/domains_v1beta1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Domains v1beta1 API
+=============================================
+.. toctree::
+    :maxdepth: 2
+
+    domains
diff --git a/packages/google-cloud-domains/docs/domains_v1beta1/types.rst b/packages/google-cloud-domains/docs/domains_v1beta1/types.rst
new file mode 100644
index 000000000000..dbbe2b8c8caf
--- /dev/null
+++ b/packages/google-cloud-domains/docs/domains_v1beta1/types.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Domains v1beta1 API
+==========================================
+
+.. automodule:: google.cloud.domains_v1beta1.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-domains/docs/index.rst b/packages/google-cloud-domains/docs/index.rst
new file mode 100644
index 000000000000..73bd5dcbc694
--- /dev/null
+++ b/packages/google-cloud-domains/docs/index.rst
@@ -0,0 +1,34 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+This package includes clients for multiple versions of Cloud Domains.
+By default, you will get version ``domains_v1``.
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    domains_v1/services
+    domains_v1/types
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    domains_v1beta1/services
+    domains_v1beta1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-domains`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-domains/docs/multiprocessing.rst b/packages/google-cloud-domains/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-domains/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
diff --git a/packages/google-cloud-domains/domains-v1beta1-py.tar.gz b/packages/google-cloud-domains/domains-v1beta1-py.tar.gz
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-domains/google/cloud/domains/__init__.py b/packages/google-cloud-domains/google/cloud/domains/__init__.py
new file mode 100644
index 000000000000..3f4f78b6c2e8
--- /dev/null
+++ b/packages/google-cloud-domains/google/cloud/domains/__init__.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.domains import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.domains_v1.services.domains.async_client import DomainsAsyncClient +from google.cloud.domains_v1.services.domains.client import DomainsClient +from google.cloud.domains_v1.types.domains import ( + AuthorizationCode, + ConfigureContactSettingsRequest, + ConfigureDnsSettingsRequest, + ConfigureManagementSettingsRequest, + ContactNotice, + ContactPrivacy, + ContactSettings, + DeleteRegistrationRequest, + DnsSettings, + DomainNotice, + ExportRegistrationRequest, + GetRegistrationRequest, + ListRegistrationsRequest, + ListRegistrationsResponse, + ManagementSettings, + OperationMetadata, + RegisterDomainRequest, + RegisterParameters, + Registration, + ResetAuthorizationCodeRequest, + RetrieveAuthorizationCodeRequest, + RetrieveRegisterParametersRequest, + RetrieveRegisterParametersResponse, + RetrieveTransferParametersRequest, + RetrieveTransferParametersResponse, + SearchDomainsRequest, + SearchDomainsResponse, + TransferDomainRequest, + TransferLockState, + TransferParameters, + UpdateRegistrationRequest, +) + +__all__ = ( + "DomainsClient", + "DomainsAsyncClient", + "AuthorizationCode", + "ConfigureContactSettingsRequest", + "ConfigureDnsSettingsRequest", + "ConfigureManagementSettingsRequest", + "ContactSettings", + "DeleteRegistrationRequest", + "DnsSettings", + "ExportRegistrationRequest", + "GetRegistrationRequest", + "ListRegistrationsRequest", + "ListRegistrationsResponse", + "ManagementSettings", + "OperationMetadata", + "RegisterDomainRequest", + "RegisterParameters", + "Registration", + "ResetAuthorizationCodeRequest", + "RetrieveAuthorizationCodeRequest", + "RetrieveRegisterParametersRequest", + "RetrieveRegisterParametersResponse", + "RetrieveTransferParametersRequest", + "RetrieveTransferParametersResponse", + "SearchDomainsRequest", + "SearchDomainsResponse", + "TransferDomainRequest", + "TransferParameters", + "UpdateRegistrationRequest", + "ContactNotice", + "ContactPrivacy", + "DomainNotice", + "TransferLockState", +) diff --git a/packages/google-cloud-domains/google/cloud/domains/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains/gapic_version.py new file mode 100644 index 000000000000..69ff013987c9 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "1.5.1" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains/py.typed b/packages/google-cloud-domains/google/cloud/domains/py.typed new file mode 100644 index 000000000000..b463d6ccaf9d --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-domains package uses inline types. diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1/__init__.py new file mode 100644 index 000000000000..6c266e34eea5 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/__init__.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.domains_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.domains import DomainsAsyncClient, DomainsClient +from .types.domains import ( + AuthorizationCode, + ConfigureContactSettingsRequest, + ConfigureDnsSettingsRequest, + ConfigureManagementSettingsRequest, + ContactNotice, + ContactPrivacy, + ContactSettings, + DeleteRegistrationRequest, + DnsSettings, + DomainNotice, + ExportRegistrationRequest, + GetRegistrationRequest, + ListRegistrationsRequest, + ListRegistrationsResponse, + ManagementSettings, + OperationMetadata, + RegisterDomainRequest, + RegisterParameters, + Registration, + ResetAuthorizationCodeRequest, + RetrieveAuthorizationCodeRequest, + RetrieveRegisterParametersRequest, + RetrieveRegisterParametersResponse, + RetrieveTransferParametersRequest, + RetrieveTransferParametersResponse, + SearchDomainsRequest, + SearchDomainsResponse, + TransferDomainRequest, + TransferLockState, + TransferParameters, + UpdateRegistrationRequest, +) + +__all__ = ( + "DomainsAsyncClient", + "AuthorizationCode", + "ConfigureContactSettingsRequest", + "ConfigureDnsSettingsRequest", + "ConfigureManagementSettingsRequest", + "ContactNotice", + "ContactPrivacy", + "ContactSettings", + "DeleteRegistrationRequest", + "DnsSettings", + "DomainNotice", + "DomainsClient", + "ExportRegistrationRequest", + "GetRegistrationRequest", + "ListRegistrationsRequest", + "ListRegistrationsResponse", + "ManagementSettings", + "OperationMetadata", + "RegisterDomainRequest", + "RegisterParameters", + "Registration", + "ResetAuthorizationCodeRequest", + "RetrieveAuthorizationCodeRequest", + "RetrieveRegisterParametersRequest", + "RetrieveRegisterParametersResponse", + "RetrieveTransferParametersRequest", + "RetrieveTransferParametersResponse", + "SearchDomainsRequest", + "SearchDomainsResponse", + "TransferDomainRequest", + "TransferLockState", + "TransferParameters", + "UpdateRegistrationRequest", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/gapic_metadata.json b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_metadata.json new file mode 100644 index 000000000000..a2b20806b296 --- /dev/null 
+++ b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_metadata.json @@ -0,0 +1,253 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.domains_v1", + "protoPackage": "google.cloud.domains.v1", + "schema": "1.0", + "services": { + "Domains": { + "clients": { + "grpc": { + "libraryClient": "DomainsClient", + "rpcs": { + "ConfigureContactSettings": { + "methods": [ + "configure_contact_settings" + ] + }, + "ConfigureDnsSettings": { + "methods": [ + "configure_dns_settings" + ] + }, + "ConfigureManagementSettings": { + "methods": [ + "configure_management_settings" + ] + }, + "DeleteRegistration": { + "methods": [ + "delete_registration" + ] + }, + "ExportRegistration": { + "methods": [ + "export_registration" + ] + }, + "GetRegistration": { + "methods": [ + "get_registration" + ] + }, + "ListRegistrations": { + "methods": [ + "list_registrations" + ] + }, + "RegisterDomain": { + "methods": [ + "register_domain" + ] + }, + "ResetAuthorizationCode": { + "methods": [ + "reset_authorization_code" + ] + }, + "RetrieveAuthorizationCode": { + "methods": [ + "retrieve_authorization_code" + ] + }, + "RetrieveRegisterParameters": { + "methods": [ + "retrieve_register_parameters" + ] + }, + "RetrieveTransferParameters": { + "methods": [ + "retrieve_transfer_parameters" + ] + }, + "SearchDomains": { + "methods": [ + "search_domains" + ] + }, + "TransferDomain": { + "methods": [ + "transfer_domain" + ] + }, + "UpdateRegistration": { + "methods": [ + "update_registration" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DomainsAsyncClient", + "rpcs": { + "ConfigureContactSettings": { + "methods": [ + "configure_contact_settings" + ] + }, + "ConfigureDnsSettings": { + "methods": [ + "configure_dns_settings" + ] + }, + "ConfigureManagementSettings": { + "methods": [ + "configure_management_settings" + ] + }, + "DeleteRegistration": { + "methods": [ + "delete_registration" + ] + }, + "ExportRegistration": { + "methods": [ + "export_registration" + ] + }, + "GetRegistration": { + "methods": [ + "get_registration" + ] + }, + "ListRegistrations": { + "methods": [ + "list_registrations" + ] + }, + "RegisterDomain": { + "methods": [ + "register_domain" + ] + }, + "ResetAuthorizationCode": { + "methods": [ + "reset_authorization_code" + ] + }, + "RetrieveAuthorizationCode": { + "methods": [ + "retrieve_authorization_code" + ] + }, + "RetrieveRegisterParameters": { + "methods": [ + "retrieve_register_parameters" + ] + }, + "RetrieveTransferParameters": { + "methods": [ + "retrieve_transfer_parameters" + ] + }, + "SearchDomains": { + "methods": [ + "search_domains" + ] + }, + "TransferDomain": { + "methods": [ + "transfer_domain" + ] + }, + "UpdateRegistration": { + "methods": [ + "update_registration" + ] + } + } + }, + "rest": { + "libraryClient": "DomainsClient", + "rpcs": { + "ConfigureContactSettings": { + "methods": [ + "configure_contact_settings" + ] + }, + "ConfigureDnsSettings": { + "methods": [ + "configure_dns_settings" + ] + }, + "ConfigureManagementSettings": { + "methods": [ + "configure_management_settings" + ] + }, + "DeleteRegistration": { + "methods": [ + "delete_registration" + ] + }, + "ExportRegistration": { + "methods": [ + "export_registration" + ] + }, + "GetRegistration": { + "methods": [ + "get_registration" + ] + }, + "ListRegistrations": { + "methods": [ + "list_registrations" + ] + }, + "RegisterDomain": { + "methods": [ + "register_domain" + ] + }, + 
"ResetAuthorizationCode": { + "methods": [ + "reset_authorization_code" + ] + }, + "RetrieveAuthorizationCode": { + "methods": [ + "retrieve_authorization_code" + ] + }, + "RetrieveRegisterParameters": { + "methods": [ + "retrieve_register_parameters" + ] + }, + "RetrieveTransferParameters": { + "methods": [ + "retrieve_transfer_parameters" + ] + }, + "SearchDomains": { + "methods": [ + "search_domains" + ] + }, + "TransferDomain": { + "methods": [ + "transfer_domain" + ] + }, + "UpdateRegistration": { + "methods": [ + "update_registration" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py new file mode 100644 index 000000000000..69ff013987c9 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.5.1" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/py.typed b/packages/google-cloud-domains/google/cloud/domains_v1/py.typed new file mode 100644 index 000000000000..b463d6ccaf9d --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-domains package uses inline types. diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/__init__.py new file mode 100644 index 000000000000..aa19fb0a5548 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DomainsAsyncClient +from .client import DomainsClient + +__all__ = ( + "DomainsClient", + "DomainsAsyncClient", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/async_client.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/async_client.py new file mode 100644 index 000000000000..0d5781114c3e --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/async_client.py @@ -0,0 +1,2343 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.domains_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import money_pb2 # type: ignore + +from google.cloud.domains_v1.services.domains import pagers +from google.cloud.domains_v1.types import domains + +from .client import DomainsClient +from .transports.base import DEFAULT_CLIENT_INFO, DomainsTransport +from .transports.grpc_asyncio import DomainsGrpcAsyncIOTransport + + +class DomainsAsyncClient: + """The Cloud Domains API enables management and configuration of + domain names. 
+    """
+
+    _client: DomainsClient
+
+    DEFAULT_ENDPOINT = DomainsClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = DomainsClient.DEFAULT_MTLS_ENDPOINT
+
+    registration_path = staticmethod(DomainsClient.registration_path)
+    parse_registration_path = staticmethod(DomainsClient.parse_registration_path)
+    common_billing_account_path = staticmethod(
+        DomainsClient.common_billing_account_path
+    )
+    parse_common_billing_account_path = staticmethod(
+        DomainsClient.parse_common_billing_account_path
+    )
+    common_folder_path = staticmethod(DomainsClient.common_folder_path)
+    parse_common_folder_path = staticmethod(DomainsClient.parse_common_folder_path)
+    common_organization_path = staticmethod(DomainsClient.common_organization_path)
+    parse_common_organization_path = staticmethod(
+        DomainsClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(DomainsClient.common_project_path)
+    parse_common_project_path = staticmethod(DomainsClient.parse_common_project_path)
+    common_location_path = staticmethod(DomainsClient.common_location_path)
+    parse_common_location_path = staticmethod(DomainsClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DomainsAsyncClient: The constructed client.
+        """
+        return DomainsClient.from_service_account_info.__func__(DomainsAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DomainsAsyncClient: The constructed client.
+        """
+        return DomainsClient.from_service_account_file.__func__(DomainsAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DomainsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DomainsTransport: + """Returns the transport used by the client instance. + + Returns: + DomainsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(DomainsClient).get_transport_class, type(DomainsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DomainsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the domains client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DomainsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DomainsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def search_domains( + self, + request: Optional[Union[domains.SearchDomainsRequest, dict]] = None, + *, + location: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.SearchDomainsResponse: + r"""Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_search_domains(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.SearchDomainsRequest( + query="query_value", + location="location_value", + ) + + # Make the request + response = await client.search_domains(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.SearchDomainsRequest, dict]]): + The request object. Request for the ``SearchDomains`` method. + location (:class:`str`): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Required. String used to search for + available domain names. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.SearchDomainsResponse: + Response for the SearchDomains method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.SearchDomainsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_domains, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def retrieve_register_parameters( + self, + request: Optional[ + Union[domains.RetrieveRegisterParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveRegisterParametersResponse: + r"""Gets parameters needed to register a new domain name, including + price and up-to-date availability. Use the returned values to + call ``RegisterDomain``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_retrieve_register_parameters(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.RetrieveRegisterParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = await client.retrieve_register_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.RetrieveRegisterParametersRequest, dict]]): + The request object. Request for the ``RetrieveRegisterParameters`` method. + location (:class:`str`): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (:class:`str`): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.RetrieveRegisterParametersResponse: + Response for the RetrieveRegisterParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.RetrieveRegisterParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retrieve_register_parameters, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
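+        # Editorial sketch (comments only; not part of the generated
+        # surface): the returned response carries a ``register_parameters``
+        # message whose ``yearly_price`` is the value that a follow-up
+        # ``register_domain`` call expects, e.g.:
+        #
+        #     params = response.register_parameters
+        #     # pass params.yearly_price as ``yearly_price`` to
+        #     # ``register_domain`` along with the chosen Registration.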
+ return response + + async def register_domain( + self, + request: Optional[Union[domains.RegisterDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_register_domain(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + registration = domains_v1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1.RegisterDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.register_domain(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.RegisterDomainRequest, dict]]): + The request object. Request for the ``RegisterDomain`` method. + parent (:class:`str`): + Required. The parent resource of the ``Registration``. + Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (:class:`google.cloud.domains_v1.types.Registration`): + Required. The complete ``Registration`` resource to be + created. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (:class:`google.type.money_pb2.Money`): + Required. Yearly price to register or + renew the domain. 
The value that should + be put here can be obtained from + RetrieveRegisterParameters or + SearchDomains calls. + + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, registration, yearly_price]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.RegisterDomainRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.register_domain, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
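+        # Editorial note (not generated code): the AsyncOperation returned
+        # here resolves to a ``domains.Registration`` once the backend
+        # finishes; a sketch of consuming it, assuming a caller that already
+        # holds this response:
+        #
+        #     registration = await response.result()
+        #     print(registration.state)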
+ return response + + async def retrieve_transfer_parameters( + self, + request: Optional[ + Union[domains.RetrieveTransferParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveTransferParametersResponse: + r"""Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_retrieve_transfer_parameters(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.RetrieveTransferParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = await client.retrieve_transfer_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.RetrieveTransferParametersRequest, dict]]): + The request object. Request for the ``RetrieveTransferParameters`` method. + location (:class:`str`): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (:class:`str`): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.RetrieveTransferParametersResponse: + Response for the RetrieveTransferParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.RetrieveTransferParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
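+        # (Editorial note: ``wrap_method`` also attaches ``client_info`` for
+        # the ``x-goog-api-client`` metrics header; the per-call ``retry``
+        # and ``timeout`` arguments passed to ``rpc`` below take precedence
+        # over these defaults.)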
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retrieve_transfer_parameters, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def transfer_domain( + self, + request: Optional[Union[domains.TransferDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + authorization_code: Optional[domains.AuthorizationCode] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_transfer_domain(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + registration = domains_v1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1.TransferDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.transfer_domain(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.TransferDomainRequest, dict]]): + The request object. Request for the ``TransferDomain`` method. + parent (:class:`str`): + Required. The parent resource of the ``Registration``. + Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (:class:`google.cloud.domains_v1.types.Registration`): + Required. The complete ``Registration`` resource to be + created. + + You can leave ``registration.dns_settings`` unset to + import the domain's current DNS configuration from its + current registrar. Use this option only if you are sure + that the domain's current DNS service does not cease + upon transfer, as is often the case for DNS services + provided for free by the registrar. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (:class:`google.type.money_pb2.Money`): + Required. Acknowledgement of the price to transfer or + renew the domain for one year. Call + ``RetrieveTransferParameters`` to obtain the price, + which you must acknowledge. + + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorization_code (:class:`google.cloud.domains_v1.types.AuthorizationCode`): + The domain's transfer authorization + code. You can obtain this from the + domain's current registrar. + + This corresponds to the ``authorization_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, registration, yearly_price, authorization_code] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.TransferDomainRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + if authorization_code is not None: + request.authorization_code = authorization_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.transfer_domain, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_registrations( + self, + request: Optional[Union[domains.ListRegistrationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRegistrationsAsyncPager: + r"""Lists the ``Registration`` resources in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_list_registrations(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.ListRegistrationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_registrations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.ListRegistrationsRequest, dict]]): + The request object. Request for the ``ListRegistrations`` method. + parent (:class:`str`): + Required. The project and location from which to list + ``Registration``\ s, specified in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.services.domains.pagers.ListRegistrationsAsyncPager: + Response for the ListRegistrations method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ListRegistrationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_registrations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRegistrationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_registration( + self, + request: Optional[Union[domains.GetRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.Registration: + r"""Gets the details of a ``Registration`` resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_get_registration(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.GetRegistrationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.GetRegistrationRequest, dict]]): + The request object. Request for the ``GetRegistration`` method. + name (:class:`str`): + Required. The name of the ``Registration`` to get, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.Registration: + The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.GetRegistrationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_registration, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_registration( + self, + request: Optional[Union[domains.UpdateRegistrationRequest, dict]] = None, + *, + registration: Optional[domains.Registration] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_update_registration(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.UpdateRegistrationRequest( + ) + + # Make the request + operation = client.update_registration(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.UpdateRegistrationRequest, dict]]): + The request object. Request for the ``UpdateRegistration`` method. + registration (:class:`google.cloud.domains_v1.types.Registration`): + Fields of the ``Registration`` to update. + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the labels are being updated, the ``update_mask`` is + ``"labels"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. 
+ First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.UpdateRegistrationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_registration, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration.name", request.registration.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def configure_management_settings( + self, + request: Optional[ + Union[domains.ConfigureManagementSettingsRequest, dict] + ] = None, + *, + registration: Optional[str] = None, + management_settings: Optional[domains.ManagementSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a ``Registration``'s management settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_configure_management_settings(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.ConfigureManagementSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_management_settings(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.ConfigureManagementSettingsRequest, dict]]): + The request object. Request for the ``ConfigureManagementSettings`` method. 
+ registration (:class:`str`): + Required. The name of the ``Registration`` whose + management settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management_settings (:class:`google.cloud.domains_v1.types.ManagementSettings`): + Fields of the ``ManagementSettings`` to update. + This corresponds to the ``management_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the transfer lock is being updated, the ``update_mask`` + is ``"transfer_lock_state"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, management_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ConfigureManagementSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if management_settings is not None: + request.management_settings = management_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.configure_management_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
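+        # (Editorial note: ``to_grpc_metadata`` renders these fields into the
+        # ``x-goog-request-params`` header, which routes the call to the
+        # region that owns the named resource.)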
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("registration", request.registration),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            domains.Registration,
+            metadata_type=domains.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def configure_dns_settings(
+        self,
+        request: Optional[Union[domains.ConfigureDnsSettingsRequest, dict]] = None,
+        *,
+        registration: Optional[str] = None,
+        dns_settings: Optional[domains.DnsSettings] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Updates a ``Registration``'s DNS settings.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import domains_v1
+
+            async def sample_configure_dns_settings():
+                # Create a client
+                client = domains_v1.DomainsAsyncClient()
+
+                # Initialize request argument(s)
+                request = domains_v1.ConfigureDnsSettingsRequest(
+                    registration="registration_value",
+                )
+
+                # Make the request
+                operation = client.configure_dns_settings(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.domains_v1.types.ConfigureDnsSettingsRequest, dict]]):
+                The request object. Request for the ``ConfigureDnsSettings`` method.
+            registration (:class:`str`):
+                Required. The name of the ``Registration`` whose DNS
+                settings are being updated, in the format
+                ``projects/*/locations/*/registrations/*``.
+
+                This corresponds to the ``registration`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            dns_settings (:class:`google.cloud.domains_v1.types.DnsSettings`):
+                Fields of the ``DnsSettings`` to update.
+                This corresponds to the ``dns_settings`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. The field mask describing which fields to
+                update as a comma-separated list. For example, if only
+                the name servers are being updated for an existing
+                Custom DNS configuration, the ``update_mask`` is
+                ``"custom_dns.name_servers"``.
+
+                When changing the DNS provider from one type to another,
+                pass the new provider's field name as part of the field
+                mask. For example, when changing from a Google Domains
+                DNS configuration to a Custom DNS configuration, the
+                ``update_mask`` is ``"custom_dns"``.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, dns_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ConfigureDnsSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if dns_settings is not None: + request.dns_settings = dns_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.configure_dns_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def configure_contact_settings( + self, + request: Optional[Union[domains.ConfigureContactSettingsRequest, dict]] = None, + *, + registration: Optional[str] = None, + contact_settings: Optional[domains.ContactSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a ``Registration``'s contact settings. Some changes + require confirmation by the domain's registrant contact . + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_configure_contact_settings(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.ConfigureContactSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_contact_settings(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.ConfigureContactSettingsRequest, dict]]): + The request object. Request for the ``ConfigureContactSettings`` method. + registration (:class:`str`): + Required. The name of the ``Registration`` whose contact + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contact_settings (:class:`google.cloud.domains_v1.types.ContactSettings`): + Fields of the ``ContactSettings`` to update. + This corresponds to the ``contact_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the registrant contact is being updated, the + ``update_mask`` is ``"registrant_contact"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, contact_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ConfigureContactSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if contact_settings is not None: + request.contact_settings = contact_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.configure_contact_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def export_registration( + self, + request: Optional[Union[domains.ExportRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports a ``Registration`` resource, such that it is no longer + managed by Cloud Domains. + + When an active domain is successfully exported, you can continue + to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_export_registration(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.ExportRegistrationRequest( + name="name_value", + ) + + # Make the request + operation = client.export_registration(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.ExportRegistrationRequest, dict]]): + The request object. Request for the ``ExportRegistration`` method. + name (:class:`str`): + Required. 
The name of the ``Registration`` to export, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ExportRegistrationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_registration, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_registration( + self, + request: Optional[Union[domains.DeleteRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a ``Registration`` resource. 
+ + This method works on any ``Registration`` resource using + `Subscription or Commitment + billing `__, provided that the + resource was created at least 1 day in the past. + + For ``Registration`` resources using `Monthly + billing `__, this method works + if: + + - ``state`` is ``EXPORTED`` with ``expire_time`` in the past + - ``state`` is ``REGISTRATION_FAILED`` + - ``state`` is ``TRANSFER_FAILED`` + + When an active registration is successfully deleted, you can + continue to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_delete_registration(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.DeleteRegistrationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_registration(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.DeleteRegistrationRequest, dict]]): + The request object. Request for the ``DeleteRegistration`` method. + name (:class:`str`): + Required. The name of the ``Registration`` to delete, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.DeleteRegistrationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_registration, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def retrieve_authorization_code( + self, + request: Optional[Union[domains.RetrieveAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_retrieve_authorization_code(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.RetrieveAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = await client.retrieve_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.RetrieveAuthorizationCodeRequest, dict]]): + The request object. Request for the ``RetrieveAuthorizationCode`` method. + registration (:class:`str`): + Required. The name of the ``Registration`` whose + authorization code is being retrieved, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
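+        # Note: the guard below relies on truthiness, so a falsy value such
+        # as an empty string is treated the same as an unset argument.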
+ has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.RetrieveAuthorizationCodeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retrieve_authorization_code, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reset_authorization_code( + self, + request: Optional[Union[domains.ResetAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + async def sample_reset_authorization_code(): + # Create a client + client = domains_v1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1.ResetAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = await client.reset_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1.types.ResetAuthorizationCodeRequest, dict]]): + The request object. Request for the ``ResetAuthorizationCode`` method. + registration (:class:`str`): + Required. The name of the ``Registration`` whose + authorization code is being reset, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ResetAuthorizationCodeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reset_authorization_code, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DomainsAsyncClient",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py new file mode 100644 index 000000000000..1b027ec7f902 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py @@ -0,0 +1,2595 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.domains_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.type import money_pb2  # type: ignore
+
+from google.cloud.domains_v1.services.domains import pagers
+from google.cloud.domains_v1.types import domains
+
+from .transports.base import DEFAULT_CLIENT_INFO, DomainsTransport
+from .transports.grpc import DomainsGrpcTransport
+from .transports.grpc_asyncio import DomainsGrpcAsyncIOTransport
+from .transports.rest import DomainsRestTransport
+
+
+class DomainsClientMeta(type):
+    """Metaclass for the Domains client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DomainsTransport]]
+    _transport_registry["grpc"] = DomainsGrpcTransport
+    _transport_registry["grpc_asyncio"] = DomainsGrpcAsyncIOTransport
+    _transport_registry["rest"] = DomainsRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[DomainsTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DomainsClient(metaclass=DomainsClientMeta):
+    """The Cloud Domains API enables management and configuration of
+    domain names.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
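+            # For example, "domains.sandbox.googleapis.com" matches with
+            # name="domains", sandbox=".sandbox" and
+            # googledomain=".googleapis.com".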
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "domains.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DomainsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DomainsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DomainsTransport: + """Returns the transport used by the client instance. + + Returns: + DomainsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def registration_path( + project: str, + location: str, + registration: str, + ) -> str: + """Returns a fully-qualified registration string.""" + return "projects/{project}/locations/{location}/registrations/{registration}".format( + project=project, + location=location, + registration=registration, + ) + + @staticmethod + def parse_registration_path(path: str) -> Dict[str, str]: + """Parses a registration path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/registrations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, DomainsTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the domains client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, DomainsTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DomainsTransport): + # transport is a DomainsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def search_domains( + self, + request: Optional[Union[domains.SearchDomainsRequest, dict]] = None, + *, + location: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.SearchDomainsResponse: + r"""Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_search_domains(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.SearchDomainsRequest( + query="query_value", + location="location_value", + ) + + # Make the request + response = client.search_domains(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.SearchDomainsRequest, dict]): + The request object. Request for the ``SearchDomains`` method. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Required. String used to search for + available domain names. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.SearchDomainsResponse: + Response for the SearchDomains method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.SearchDomainsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.SearchDomainsRequest): + request = domains.SearchDomainsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_domains] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def retrieve_register_parameters( + self, + request: Optional[ + Union[domains.RetrieveRegisterParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveRegisterParametersResponse: + r"""Gets parameters needed to register a new domain name, including + price and up-to-date availability. Use the returned values to + call ``RegisterDomain``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_retrieve_register_parameters(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.RetrieveRegisterParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = client.retrieve_register_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.RetrieveRegisterParametersRequest, dict]): + The request object. Request for the ``RetrieveRegisterParameters`` method. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (str): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.RetrieveRegisterParametersResponse: + Response for the RetrieveRegisterParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RetrieveRegisterParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RetrieveRegisterParametersRequest): + request = domains.RetrieveRegisterParametersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_register_parameters + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
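+        # Its `register_parameters` field carries the availability and
+        # pricing details that a subsequent RegisterDomain call expects.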
+ return response + + def register_domain( + self, + request: Optional[Union[domains.RegisterDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_register_domain(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + registration = domains_v1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1.RegisterDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.register_domain(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.RegisterDomainRequest, dict]): + The request object. Request for the ``RegisterDomain`` method. + parent (str): + Required. The parent resource of the ``Registration``. + Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (google.cloud.domains_v1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (google.type.money_pb2.Money): + Required. Yearly price to register or + renew the domain. The value that should + be put here can be obtained from + RetrieveRegisterParameters or + SearchDomains calls. 
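+                For example, a price of US$12 per
+                year is expressed as
+                ``Money(currency_code="USD", units=12)``.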
+ + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, registration, yearly_price]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RegisterDomainRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RegisterDomainRequest): + request = domains.RegisterDomainRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.register_domain] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
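+        # The returned future resolves to a `domains.Registration`; as noted
+        # above, a successful registration typically reaches the `ACTIVE`
+        # state within 1-2 minutes.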
+ return response + + def retrieve_transfer_parameters( + self, + request: Optional[ + Union[domains.RetrieveTransferParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveTransferParametersResponse: + r"""Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_retrieve_transfer_parameters(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.RetrieveTransferParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = client.retrieve_transfer_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.RetrieveTransferParametersRequest, dict]): + The request object. Request for the ``RetrieveTransferParameters`` method. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (str): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.RetrieveTransferParametersResponse: + Response for the RetrieveTransferParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RetrieveTransferParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RetrieveTransferParametersRequest): + request = domains.RetrieveTransferParametersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_transfer_parameters + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def transfer_domain( + self, + request: Optional[Union[domains.TransferDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + authorization_code: Optional[domains.AuthorizationCode] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_transfer_domain(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + registration = domains_v1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1.TransferDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.transfer_domain(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.TransferDomainRequest, dict]): + The request object. Request for the ``TransferDomain`` method. + parent (str): + Required. The parent resource of the ``Registration``. + Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (google.cloud.domains_v1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + + You can leave ``registration.dns_settings`` unset to + import the domain's current DNS configuration from its + current registrar. Use this option only if you are sure + that the domain's current DNS service does not cease + upon transfer, as is often the case for DNS services + provided for free by the registrar. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (google.type.money_pb2.Money): + Required. Acknowledgement of the price to transfer or + renew the domain for one year. Call + ``RetrieveTransferParameters`` to obtain the price, + which you must acknowledge. + + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorization_code (google.cloud.domains_v1.types.AuthorizationCode): + The domain's transfer authorization + code. You can obtain this from the + domain's current registrar. + + This corresponds to the ``authorization_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. 
+ + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, registration, yearly_price, authorization_code] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.TransferDomainRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.TransferDomainRequest): + request = domains.TransferDomainRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + if authorization_code is not None: + request.authorization_code = authorization_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.transfer_domain] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_registrations( + self, + request: Optional[Union[domains.ListRegistrationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRegistrationsPager: + r"""Lists the ``Registration`` resources in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_list_registrations(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.ListRegistrationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_registrations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.ListRegistrationsRequest, dict]): + The request object. Request for the ``ListRegistrations`` method. + parent (str): + Required. The project and location from which to list + ``Registration``\ s, specified in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.services.domains.pagers.ListRegistrationsPager: + Response for the ListRegistrations method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ListRegistrationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ListRegistrationsRequest): + request = domains.ListRegistrationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_registrations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRegistrationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_registration( + self, + request: Optional[Union[domains.GetRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.Registration: + r"""Gets the details of a ``Registration`` resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_get_registration(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.GetRegistrationRequest( + name="name_value", + ) + + # Make the request + response = client.get_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.GetRegistrationRequest, dict]): + The request object. Request for the ``GetRegistration`` method. + name (str): + Required. The name of the ``Registration`` to get, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.Registration: + The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.GetRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.GetRegistrationRequest): + request = domains.GetRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_registration] + + # Certain fields should be provided within the metadata header; + # add these here. 
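+        # A note on what the next statement produces (illustrative; the resource
+        # name is a placeholder): to_grpc_metadata((("name", request.name),))
+        # yields the ("x-goog-request-params", "name=<url-encoded-name>") pair,
+        # which the service uses to route the call. For example,
+        # name="projects/my-project/locations/global/registrations/example-domain"
+        # would be sent as x-goog-request-params: name=projects%2Fmy-project%2F...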
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_registration( + self, + request: Optional[Union[domains.UpdateRegistrationRequest, dict]] = None, + *, + registration: Optional[domains.Registration] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_update_registration(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.UpdateRegistrationRequest( + ) + + # Make the request + operation = client.update_registration(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.UpdateRegistrationRequest, dict]): + The request object. Request for the ``UpdateRegistration`` method. + registration (google.cloud.domains_v1.types.Registration): + Fields of the ``Registration`` to update. + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the labels are being updated, the ``update_mask`` is + ``"labels"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. 
After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.UpdateRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.UpdateRegistrationRequest): + request = domains.UpdateRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_registration] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration.name", request.registration.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def configure_management_settings( + self, + request: Optional[ + Union[domains.ConfigureManagementSettingsRequest, dict] + ] = None, + *, + registration: Optional[str] = None, + management_settings: Optional[domains.ManagementSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a ``Registration``'s management settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_configure_management_settings(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.ConfigureManagementSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_management_settings(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.ConfigureManagementSettingsRequest, dict]): + The request object. Request for the ``ConfigureManagementSettings`` method. + registration (str): + Required. The name of the ``Registration`` whose + management settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management_settings (google.cloud.domains_v1.types.ManagementSettings): + Fields of the ``ManagementSettings`` to update. + This corresponds to the ``management_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the transfer lock is being updated, the ``update_mask`` + is ``"transfer_lock_state"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
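+        # Illustrative calling conventions for the check below (values are
+        # hypothetical):
+        #   client.configure_management_settings(registration="projects/p/locations/global/registrations/example.com")  # flattened args: OK
+        #   client.configure_management_settings(request=domains_v1.ConfigureManagementSettingsRequest(...))  # request object: OK
+        #   client.configure_management_settings(request=req, registration="...")  # mixing both raises ValueError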
+ has_flattened_params = any([registration, management_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ConfigureManagementSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ConfigureManagementSettingsRequest): + request = domains.ConfigureManagementSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if management_settings is not None: + request.management_settings = management_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.configure_management_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def configure_dns_settings( + self, + request: Optional[Union[domains.ConfigureDnsSettingsRequest, dict]] = None, + *, + registration: Optional[str] = None, + dns_settings: Optional[domains.DnsSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a ``Registration``'s DNS settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_configure_dns_settings(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.ConfigureDnsSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_dns_settings(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.ConfigureDnsSettingsRequest, dict]): + The request object. Request for the ``ConfigureDnsSettings`` method. + registration (str): + Required. The name of the ``Registration`` whose DNS + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. 
+
+                This corresponds to the ``registration`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            dns_settings (google.cloud.domains_v1.types.DnsSettings):
+                Fields of the ``DnsSettings`` to update.
+                This corresponds to the ``dns_settings`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. The field mask describing which fields to
+                update as a comma-separated list. For example, if only
+                the name servers are being updated for an existing
+                Custom DNS configuration, the ``update_mask`` is
+                ``"custom_dns.name_servers"``.
+
+                When changing the DNS provider from one type to another,
+                pass the new provider's field name as part of the field
+                mask. For example, when changing from a Google Domains
+                DNS configuration to a Custom DNS configuration, the
+                ``update_mask`` is ``"custom_dns"``.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name
+                registrations.
+
+                There are several ways to create a new Registration
+                resource:
+
+                To create a new Registration resource, find a
+                suitable domain name by calling the SearchDomains
+                method with a query to see available domain name
+                options. After choosing a name, call
+                RetrieveRegisterParameters to ensure availability and
+                obtain information like pricing, which is needed to
+                build a call to RegisterDomain.
+
+                Another way to create a new Registration is to
+                transfer an existing domain from another registrar.
+                First, go to the current registrar to unlock the
+                domain for transfer and retrieve the domain's
+                transfer authorization code. Then call
+                RetrieveTransferParameters to confirm that the domain
+                is unlocked and to get values needed to build a call
+                to TransferDomain.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([registration, dns_settings, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a domains.ConfigureDnsSettingsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, domains.ConfigureDnsSettingsRequest):
+            request = domains.ConfigureDnsSettingsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
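+        # A minimal caller-side sketch of these fields (hypothetical values),
+        # e.g. switching a registration to custom name servers:
+        #   from google.protobuf import field_mask_pb2
+        #   dns_settings = domains_v1.DnsSettings(
+        #       custom_dns=domains_v1.DnsSettings.CustomDns(
+        #           name_servers=["ns1.example.com", "ns2.example.com"]))
+        #   update_mask = field_mask_pb2.FieldMask(paths=["custom_dns"])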
+        if registration is not None:
+            request.registration = registration
+        if dns_settings is not None:
+            request.dns_settings = dns_settings
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.configure_dns_settings]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("registration", request.registration),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            domains.Registration,
+            metadata_type=domains.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def configure_contact_settings(
+        self,
+        request: Optional[Union[domains.ConfigureContactSettingsRequest, dict]] = None,
+        *,
+        registration: Optional[str] = None,
+        contact_settings: Optional[domains.ContactSettings] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Updates a ``Registration``'s contact settings. Some changes
+        require confirmation by the domain's registrant contact.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import domains_v1
+
+            def sample_configure_contact_settings():
+                # Create a client
+                client = domains_v1.DomainsClient()
+
+                # Initialize request argument(s)
+                request = domains_v1.ConfigureContactSettingsRequest(
+                    registration="registration_value",
+                )
+
+                # Make the request
+                operation = client.configure_contact_settings(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.domains_v1.types.ConfigureContactSettingsRequest, dict]):
+                The request object. Request for the ``ConfigureContactSettings`` method.
+            registration (str):
+                Required. The name of the ``Registration`` whose contact
+                settings are being updated, in the format
+                ``projects/*/locations/*/registrations/*``.
+
+                This corresponds to the ``registration`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            contact_settings (google.cloud.domains_v1.types.ContactSettings):
+                Fields of the ``ContactSettings`` to update.
+                This corresponds to the ``contact_settings`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. The field mask describing which fields to
+                update as a comma-separated list. For example, if only
+                the registrant contact is being updated, the
+                ``update_mask`` is ``"registrant_contact"``.
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, contact_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ConfigureContactSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ConfigureContactSettingsRequest): + request = domains.ConfigureContactSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if contact_settings is not None: + request.contact_settings = contact_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.configure_contact_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
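+        # The value returned below is a google.api_core.operation.Operation
+        # future; a caller typically blocks on it, e.g. op.result(timeout=300)
+        # (sketch), which raises on failure and yields the final Registration
+        # on success.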
+ return response + + def export_registration( + self, + request: Optional[Union[domains.ExportRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports a ``Registration`` resource, such that it is no longer + managed by Cloud Domains. + + When an active domain is successfully exported, you can continue + to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_export_registration(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.ExportRegistrationRequest( + name="name_value", + ) + + # Make the request + operation = client.export_registration(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.ExportRegistrationRequest, dict]): + The request object. Request for the ``ExportRegistration`` method. + name (str): + Required. The name of the ``Registration`` to export, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ExportRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ExportRegistrationRequest): + request = domains.ExportRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_registration] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_registration( + self, + request: Optional[Union[domains.DeleteRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a ``Registration`` resource. + + This method works on any ``Registration`` resource using + `Subscription or Commitment + billing `__, provided that the + resource was created at least 1 day in the past. + + For ``Registration`` resources using `Monthly + billing `__, this method works + if: + + - ``state`` is ``EXPORTED`` with ``expire_time`` in the past + - ``state`` is ``REGISTRATION_FAILED`` + - ``state`` is ``TRANSFER_FAILED`` + + When an active registration is successfully deleted, you can + continue to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_delete_registration(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.DeleteRegistrationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_registration(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.DeleteRegistrationRequest, dict]): + The request object. Request for the ``DeleteRegistration`` method. + name (str): + Required. The name of the ``Registration`` to delete, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.DeleteRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.DeleteRegistrationRequest): + request = domains.DeleteRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_registration] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
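+        # For deletions the future resolves to empty_pb2.Empty, so
+        # operation.result() returns an empty message; progress details, if
+        # any, are exposed through operation.metadata (a
+        # domains.OperationMetadata).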
+ return response + + def retrieve_authorization_code( + self, + request: Optional[Union[domains.RetrieveAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_retrieve_authorization_code(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.RetrieveAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = client.retrieve_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.RetrieveAuthorizationCodeRequest, dict]): + The request object. Request for the ``RetrieveAuthorizationCode`` method. + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being retrieved, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RetrieveAuthorizationCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RetrieveAuthorizationCodeRequest): + request = domains.RetrieveAuthorizationCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_authorization_code + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
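+        # The eventual response is a domains.AuthorizationCode; its ``code``
+        # field authorizes transferring the domain away, so treat it like a
+        # credential. Caller-side sketch (resource name is hypothetical):
+        #   code = client.retrieve_authorization_code(
+        #       registration="projects/p/locations/global/registrations/example.com").code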
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reset_authorization_code( + self, + request: Optional[Union[domains.ResetAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1 + + def sample_reset_authorization_code(): + # Create a client + client = domains_v1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1.ResetAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = client.reset_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1.types.ResetAuthorizationCodeRequest, dict]): + The request object. Request for the ``ResetAuthorizationCode`` method. + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being reset, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ResetAuthorizationCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ResetAuthorizationCodeRequest): + request = domains.ResetAuthorizationCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
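+        # Per-call overrides of the wrapped defaults are possible; a minimal
+        # sketch, assuming the standard google.api_core retry API:
+        #   from google.api_core import retry as retries
+        #   client.reset_authorization_code(
+        #       registration=reg_name,  # hypothetical resource name
+        #       retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0, deadline=60.0),
+        #       timeout=30.0,
+        #   )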
+ rpc = self._transport._wrapped_methods[self._transport.reset_authorization_code] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DomainsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DomainsClient",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/pagers.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/pagers.py new file mode 100644 index 000000000000..4844a0f54c63 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.domains_v1.types import domains + + +class ListRegistrationsPager: + """A pager for iterating through ``list_registrations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.domains_v1.types.ListRegistrationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``registrations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRegistrations`` requests and continue to iterate + through the ``registrations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.domains_v1.types.ListRegistrationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., domains.ListRegistrationsResponse], + request: domains.ListRegistrationsRequest, + response: domains.ListRegistrationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.domains_v1.types.ListRegistrationsRequest): + The initial request object. + response (google.cloud.domains_v1.types.ListRegistrationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = domains.ListRegistrationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[domains.ListRegistrationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[domains.Registration]: + for page in self.pages: + yield from page.registrations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRegistrationsAsyncPager: + """A pager for iterating through ``list_registrations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.domains_v1.types.ListRegistrationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``registrations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRegistrations`` requests and continue to iterate + through the ``registrations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.domains_v1.types.ListRegistrationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[domains.ListRegistrationsResponse]], + request: domains.ListRegistrationsRequest, + response: domains.ListRegistrationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.domains_v1.types.ListRegistrationsRequest): + The initial request object. + response (google.cloud.domains_v1.types.ListRegistrationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = domains.ListRegistrationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[domains.ListRegistrationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[domains.Registration]: + async def async_generator(): + async for page in self.pages: + for response in page.registrations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/__init__.py new file mode 100644 index 000000000000..8351ddd28377 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DomainsTransport +from .grpc import DomainsGrpcTransport +from .grpc_asyncio import DomainsGrpcAsyncIOTransport +from .rest import DomainsRestInterceptor, DomainsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DomainsTransport]] +_transport_registry["grpc"] = DomainsGrpcTransport +_transport_registry["grpc_asyncio"] = DomainsGrpcAsyncIOTransport +_transport_registry["rest"] = DomainsRestTransport + +__all__ = ( + "DomainsTransport", + "DomainsGrpcTransport", + "DomainsGrpcAsyncIOTransport", + "DomainsRestTransport", + "DomainsRestInterceptor", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/base.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/base.py new file mode 100644 index 000000000000..6482556f34ba --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/base.py @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.domains_v1 import gapic_version as package_version +from google.cloud.domains_v1.types import domains + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DomainsTransport(abc.ABC): + """Abstract transport class for Domains.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "domains.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
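+        # Self-signed JWTs let service-account credentials authenticate
+        # directly to the API, skipping the OAuth token-exchange round trip;
+        # the hasattr check below keeps compatibility with older google-auth
+        # releases that lack the method.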
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.search_domains: gapic_v1.method.wrap_method( + self.search_domains, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_register_parameters: gapic_v1.method.wrap_method( + self.retrieve_register_parameters, + default_timeout=None, + client_info=client_info, + ), + self.register_domain: gapic_v1.method.wrap_method( + self.register_domain, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_transfer_parameters: gapic_v1.method.wrap_method( + self.retrieve_transfer_parameters, + default_timeout=None, + client_info=client_info, + ), + self.transfer_domain: gapic_v1.method.wrap_method( + self.transfer_domain, + default_timeout=None, + client_info=client_info, + ), + self.list_registrations: gapic_v1.method.wrap_method( + self.list_registrations, + default_timeout=None, + client_info=client_info, + ), + self.get_registration: gapic_v1.method.wrap_method( + self.get_registration, + default_timeout=None, + client_info=client_info, + ), + self.update_registration: gapic_v1.method.wrap_method( + self.update_registration, + default_timeout=None, + client_info=client_info, + ), + self.configure_management_settings: gapic_v1.method.wrap_method( + self.configure_management_settings, + default_timeout=None, + client_info=client_info, + ), + self.configure_dns_settings: gapic_v1.method.wrap_method( + self.configure_dns_settings, + default_timeout=None, + client_info=client_info, + ), + self.configure_contact_settings: gapic_v1.method.wrap_method( + self.configure_contact_settings, + default_timeout=None, + client_info=client_info, + ), + self.export_registration: gapic_v1.method.wrap_method( + self.export_registration, + default_timeout=None, + client_info=client_info, + ), + self.delete_registration: gapic_v1.method.wrap_method( + self.delete_registration, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_authorization_code: gapic_v1.method.wrap_method( + self.retrieve_authorization_code, + default_timeout=None, + client_info=client_info, + ), + self.reset_authorization_code: gapic_v1.method.wrap_method( + self.reset_authorization_code, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def search_domains( + self, + ) -> Callable[ + [domains.SearchDomainsRequest], + Union[domains.SearchDomainsResponse, Awaitable[domains.SearchDomainsResponse]], + ]: + raise NotImplementedError() + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + Union[ + domains.RetrieveRegisterParametersResponse, + Awaitable[domains.RetrieveRegisterParametersResponse], + ], + ]: + raise NotImplementedError() + + @property + def register_domain( + self, + ) -> Callable[ + [domains.RegisterDomainRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + Union[ + domains.RetrieveTransferParametersResponse, + Awaitable[domains.RetrieveTransferParametersResponse], + ], + ]: + raise NotImplementedError() + + @property + def transfer_domain( + self, + ) -> Callable[ + [domains.TransferDomainRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], + Union[ + domains.ListRegistrationsResponse, + Awaitable[domains.ListRegistrationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_registration( + self, + ) -> Callable[ + [domains.GetRegistrationRequest], + Union[domains.Registration, Awaitable[domains.Registration]], + ]: + raise NotImplementedError() + + @property + def update_registration( + self, + ) -> Callable[ + [domains.UpdateRegistrationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def configure_management_settings( + self, + ) -> Callable[ + [domains.ConfigureManagementSettingsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def configure_dns_settings( + self, + ) -> Callable[ + [domains.ConfigureDnsSettingsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def configure_contact_settings( + self, + ) -> Callable[ + [domains.ConfigureContactSettingsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_registration( + self, + ) -> Callable[ + [domains.ExportRegistrationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_registration( + self, + ) -> Callable[ + [domains.DeleteRegistrationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], + Union[domains.AuthorizationCode, Awaitable[domains.AuthorizationCode]], + ]: + raise NotImplementedError() + + @property + def reset_authorization_code( + self, + ) -> Callable[ + [domains.ResetAuthorizationCodeRequest], + Union[domains.AuthorizationCode, Awaitable[domains.AuthorizationCode]], + ]: + raise NotImplementedError() + + 
@property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = ("DomainsTransport",)
diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/grpc.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/grpc.py
new file mode 100644
index 000000000000..9f61593d4b6d
--- /dev/null
+++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/grpc.py
@@ -0,0 +1,754 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.domains_v1.types import domains
+
+from .base import DEFAULT_CLIENT_INFO, DomainsTransport
+
+
+class DomainsGrpcTransport(DomainsTransport):
+    """gRPC backend transport for Domains.
+
+    The Cloud Domains API enables management and configuration of
+    domain names.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "domains.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes.
This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "domains.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def search_domains( + self, + ) -> Callable[[domains.SearchDomainsRequest], domains.SearchDomainsResponse]: + r"""Return a callable for the search domains method over gRPC. + + Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. + + Returns: + Callable[[~.SearchDomainsRequest], + ~.SearchDomainsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_domains" not in self._stubs: + self._stubs["search_domains"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/SearchDomains", + request_serializer=domains.SearchDomainsRequest.serialize, + response_deserializer=domains.SearchDomainsResponse.deserialize, + ) + return self._stubs["search_domains"] + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + domains.RetrieveRegisterParametersResponse, + ]: + r"""Return a callable for the retrieve register parameters method over gRPC. + + Gets parameters needed to register a new domain name, including + price and up-to-date availability. Use the returned values to + call ``RegisterDomain``. + + Returns: + Callable[[~.RetrieveRegisterParametersRequest], + ~.RetrieveRegisterParametersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_register_parameters" not in self._stubs: + self._stubs["retrieve_register_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RetrieveRegisterParameters", + request_serializer=domains.RetrieveRegisterParametersRequest.serialize, + response_deserializer=domains.RetrieveRegisterParametersResponse.deserialize, + ) + return self._stubs["retrieve_register_parameters"] + + @property + def register_domain( + self, + ) -> Callable[[domains.RegisterDomainRequest], operations_pb2.Operation]: + r"""Return a callable for the register domain method over gRPC. + + Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + Returns: + Callable[[~.RegisterDomainRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "register_domain" not in self._stubs: + self._stubs["register_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RegisterDomain", + request_serializer=domains.RegisterDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_domain"] + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + domains.RetrieveTransferParametersResponse, + ]: + r"""Return a callable for the retrieve transfer parameters method over gRPC. + + Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + Returns: + Callable[[~.RetrieveTransferParametersRequest], + ~.RetrieveTransferParametersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_transfer_parameters" not in self._stubs: + self._stubs["retrieve_transfer_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RetrieveTransferParameters", + request_serializer=domains.RetrieveTransferParametersRequest.serialize, + response_deserializer=domains.RetrieveTransferParametersResponse.deserialize, + ) + return self._stubs["retrieve_transfer_parameters"] + + @property + def transfer_domain( + self, + ) -> Callable[[domains.TransferDomainRequest], operations_pb2.Operation]: + r"""Return a callable for the transfer domain method over gRPC. + + Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + Returns: + Callable[[~.TransferDomainRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "transfer_domain" not in self._stubs: + self._stubs["transfer_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/TransferDomain", + request_serializer=domains.TransferDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["transfer_domain"] + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], domains.ListRegistrationsResponse + ]: + r"""Return a callable for the list registrations method over gRPC. + + Lists the ``Registration`` resources in a project. + + Returns: + Callable[[~.ListRegistrationsRequest], + ~.ListRegistrationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_registrations" not in self._stubs: + self._stubs["list_registrations"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ListRegistrations", + request_serializer=domains.ListRegistrationsRequest.serialize, + response_deserializer=domains.ListRegistrationsResponse.deserialize, + ) + return self._stubs["list_registrations"] + + @property + def get_registration( + self, + ) -> Callable[[domains.GetRegistrationRequest], domains.Registration]: + r"""Return a callable for the get registration method over gRPC. + + Gets the details of a ``Registration`` resource. + + Returns: + Callable[[~.GetRegistrationRequest], + ~.Registration]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_registration" not in self._stubs: + self._stubs["get_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/GetRegistration", + request_serializer=domains.GetRegistrationRequest.serialize, + response_deserializer=domains.Registration.deserialize, + ) + return self._stubs["get_registration"] + + @property + def update_registration( + self, + ) -> Callable[[domains.UpdateRegistrationRequest], operations_pb2.Operation]: + r"""Return a callable for the update registration method over gRPC. + + Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + Returns: + Callable[[~.UpdateRegistrationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_registration" not in self._stubs: + self._stubs["update_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/UpdateRegistration", + request_serializer=domains.UpdateRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_registration"] + + @property + def configure_management_settings( + self, + ) -> Callable[ + [domains.ConfigureManagementSettingsRequest], operations_pb2.Operation + ]: + r"""Return a callable for the configure management settings method over gRPC. + + Updates a ``Registration``'s management settings. + + Returns: + Callable[[~.ConfigureManagementSettingsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "configure_management_settings" not in self._stubs: + self._stubs[ + "configure_management_settings" + ] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ConfigureManagementSettings", + request_serializer=domains.ConfigureManagementSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_management_settings"] + + @property + def configure_dns_settings( + self, + ) -> Callable[[domains.ConfigureDnsSettingsRequest], operations_pb2.Operation]: + r"""Return a callable for the configure dns settings method over gRPC. + + Updates a ``Registration``'s DNS settings. + + Returns: + Callable[[~.ConfigureDnsSettingsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "configure_dns_settings" not in self._stubs: + self._stubs["configure_dns_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ConfigureDnsSettings", + request_serializer=domains.ConfigureDnsSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_dns_settings"] + + @property + def configure_contact_settings( + self, + ) -> Callable[[domains.ConfigureContactSettingsRequest], operations_pb2.Operation]: + r"""Return a callable for the configure contact settings method over gRPC. + + Updates a ``Registration``'s contact settings. Some changes + require confirmation by the domain's registrant contact . + + Returns: + Callable[[~.ConfigureContactSettingsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "configure_contact_settings" not in self._stubs: + self._stubs["configure_contact_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ConfigureContactSettings", + request_serializer=domains.ConfigureContactSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_contact_settings"] + + @property + def export_registration( + self, + ) -> Callable[[domains.ExportRegistrationRequest], operations_pb2.Operation]: + r"""Return a callable for the export registration method over gRPC. + + Exports a ``Registration`` resource, such that it is no longer + managed by Cloud Domains. + + When an active domain is successfully exported, you can continue + to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.ExportRegistrationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_registration" not in self._stubs: + self._stubs["export_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ExportRegistration", + request_serializer=domains.ExportRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_registration"] + + @property + def delete_registration( + self, + ) -> Callable[[domains.DeleteRegistrationRequest], operations_pb2.Operation]: + r"""Return a callable for the delete registration method over gRPC. + + Deletes a ``Registration`` resource. + + This method works on any ``Registration`` resource using + `Subscription or Commitment + billing `__, provided that the + resource was created at least 1 day in the past. + + For ``Registration`` resources using `Monthly + billing `__, this method works + if: + + - ``state`` is ``EXPORTED`` with ``expire_time`` in the past + - ``state`` is ``REGISTRATION_FAILED`` + - ``state`` is ``TRANSFER_FAILED`` + + When an active registration is successfully deleted, you can + continue to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.DeleteRegistrationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_registration" not in self._stubs: + self._stubs["delete_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/DeleteRegistration", + request_serializer=domains.DeleteRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_registration"] + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], domains.AuthorizationCode + ]: + r"""Return a callable for the retrieve authorization code method over gRPC. + + Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.RetrieveAuthorizationCodeRequest], + ~.AuthorizationCode]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_authorization_code" not in self._stubs: + self._stubs["retrieve_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RetrieveAuthorizationCode", + request_serializer=domains.RetrieveAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["retrieve_authorization_code"] + + @property + def reset_authorization_code( + self, + ) -> Callable[[domains.ResetAuthorizationCodeRequest], domains.AuthorizationCode]: + r"""Return a callable for the reset authorization code method over gRPC. + + Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.ResetAuthorizationCodeRequest], + ~.AuthorizationCode]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_authorization_code" not in self._stubs: + self._stubs["reset_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ResetAuthorizationCode", + request_serializer=domains.ResetAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["reset_authorization_code"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DomainsGrpcTransport",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/grpc_asyncio.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d67a24686152 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/grpc_asyncio.py @@ -0,0 +1,770 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.domains_v1.types import domains
+
+from .base import DEFAULT_CLIENT_INFO, DomainsTransport
+from .grpc import DomainsGrpcTransport
+
+
+class DomainsGrpcAsyncIOTransport(DomainsTransport):
+    """gRPC AsyncIO backend transport for Domains.
+
+    The Cloud Domains API enables management and configuration of
+    domain names.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "domains.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "domains.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def search_domains( + self, + ) -> Callable[ + [domains.SearchDomainsRequest], Awaitable[domains.SearchDomainsResponse] + ]: + r"""Return a callable for the search domains method over gRPC. + + Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. 
+ + Returns: + Callable[[~.SearchDomainsRequest], + Awaitable[~.SearchDomainsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_domains" not in self._stubs: + self._stubs["search_domains"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/SearchDomains", + request_serializer=domains.SearchDomainsRequest.serialize, + response_deserializer=domains.SearchDomainsResponse.deserialize, + ) + return self._stubs["search_domains"] + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + Awaitable[domains.RetrieveRegisterParametersResponse], + ]: + r"""Return a callable for the retrieve register parameters method over gRPC. + + Gets parameters needed to register a new domain name, including + price and up-to-date availability. Use the returned values to + call ``RegisterDomain``. + + Returns: + Callable[[~.RetrieveRegisterParametersRequest], + Awaitable[~.RetrieveRegisterParametersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_register_parameters" not in self._stubs: + self._stubs["retrieve_register_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RetrieveRegisterParameters", + request_serializer=domains.RetrieveRegisterParametersRequest.serialize, + response_deserializer=domains.RetrieveRegisterParametersResponse.deserialize, + ) + return self._stubs["retrieve_register_parameters"] + + @property + def register_domain( + self, + ) -> Callable[[domains.RegisterDomainRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the register domain method over gRPC. + + Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + Returns: + Callable[[~.RegisterDomainRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "register_domain" not in self._stubs: + self._stubs["register_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RegisterDomain", + request_serializer=domains.RegisterDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_domain"] + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + Awaitable[domains.RetrieveTransferParametersResponse], + ]: + r"""Return a callable for the retrieve transfer parameters method over gRPC. + + Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + Returns: + Callable[[~.RetrieveTransferParametersRequest], + Awaitable[~.RetrieveTransferParametersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_transfer_parameters" not in self._stubs: + self._stubs["retrieve_transfer_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RetrieveTransferParameters", + request_serializer=domains.RetrieveTransferParametersRequest.serialize, + response_deserializer=domains.RetrieveTransferParametersResponse.deserialize, + ) + return self._stubs["retrieve_transfer_parameters"] + + @property + def transfer_domain( + self, + ) -> Callable[[domains.TransferDomainRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the transfer domain method over gRPC. + + Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + Returns: + Callable[[~.TransferDomainRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "transfer_domain" not in self._stubs: + self._stubs["transfer_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/TransferDomain", + request_serializer=domains.TransferDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["transfer_domain"] + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], Awaitable[domains.ListRegistrationsResponse] + ]: + r"""Return a callable for the list registrations method over gRPC. + + Lists the ``Registration`` resources in a project. + + Returns: + Callable[[~.ListRegistrationsRequest], + Awaitable[~.ListRegistrationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_registrations" not in self._stubs: + self._stubs["list_registrations"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ListRegistrations", + request_serializer=domains.ListRegistrationsRequest.serialize, + response_deserializer=domains.ListRegistrationsResponse.deserialize, + ) + return self._stubs["list_registrations"] + + @property + def get_registration( + self, + ) -> Callable[[domains.GetRegistrationRequest], Awaitable[domains.Registration]]: + r"""Return a callable for the get registration method over gRPC. + + Gets the details of a ``Registration`` resource. + + Returns: + Callable[[~.GetRegistrationRequest], + Awaitable[~.Registration]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_registration" not in self._stubs: + self._stubs["get_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/GetRegistration", + request_serializer=domains.GetRegistrationRequest.serialize, + response_deserializer=domains.Registration.deserialize, + ) + return self._stubs["get_registration"] + + @property + def update_registration( + self, + ) -> Callable[ + [domains.UpdateRegistrationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update registration method over gRPC. + + Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + Returns: + Callable[[~.UpdateRegistrationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_registration" not in self._stubs: + self._stubs["update_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/UpdateRegistration", + request_serializer=domains.UpdateRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_registration"] + + @property + def configure_management_settings( + self, + ) -> Callable[ + [domains.ConfigureManagementSettingsRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the configure management settings method over gRPC. + + Updates a ``Registration``'s management settings. + + Returns: + Callable[[~.ConfigureManagementSettingsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "configure_management_settings" not in self._stubs: + self._stubs[ + "configure_management_settings" + ] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ConfigureManagementSettings", + request_serializer=domains.ConfigureManagementSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_management_settings"] + + @property + def configure_dns_settings( + self, + ) -> Callable[ + [domains.ConfigureDnsSettingsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the configure dns settings method over gRPC. + + Updates a ``Registration``'s DNS settings. + + Returns: + Callable[[~.ConfigureDnsSettingsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "configure_dns_settings" not in self._stubs: + self._stubs["configure_dns_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ConfigureDnsSettings", + request_serializer=domains.ConfigureDnsSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_dns_settings"] + + @property + def configure_contact_settings( + self, + ) -> Callable[ + [domains.ConfigureContactSettingsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the configure contact settings method over gRPC. + + Updates a ``Registration``'s contact settings. Some changes + require confirmation by the domain's registrant contact . + + Returns: + Callable[[~.ConfigureContactSettingsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "configure_contact_settings" not in self._stubs: + self._stubs["configure_contact_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ConfigureContactSettings", + request_serializer=domains.ConfigureContactSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_contact_settings"] + + @property + def export_registration( + self, + ) -> Callable[ + [domains.ExportRegistrationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the export registration method over gRPC. + + Exports a ``Registration`` resource, such that it is no longer + managed by Cloud Domains. + + When an active domain is successfully exported, you can continue + to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.ExportRegistrationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_registration" not in self._stubs: + self._stubs["export_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ExportRegistration", + request_serializer=domains.ExportRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_registration"] + + @property + def delete_registration( + self, + ) -> Callable[ + [domains.DeleteRegistrationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete registration method over gRPC. + + Deletes a ``Registration`` resource. + + This method works on any ``Registration`` resource using + `Subscription or Commitment + billing `__, provided that the + resource was created at least 1 day in the past. + + For ``Registration`` resources using `Monthly + billing `__, this method works + if: + + - ``state`` is ``EXPORTED`` with ``expire_time`` in the past + - ``state`` is ``REGISTRATION_FAILED`` + - ``state`` is ``TRANSFER_FAILED`` + + When an active registration is successfully deleted, you can + continue to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.DeleteRegistrationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_registration" not in self._stubs: + self._stubs["delete_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/DeleteRegistration", + request_serializer=domains.DeleteRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_registration"] + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], Awaitable[domains.AuthorizationCode] + ]: + r"""Return a callable for the retrieve authorization code method over gRPC. + + Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.RetrieveAuthorizationCodeRequest], + Awaitable[~.AuthorizationCode]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_authorization_code" not in self._stubs: + self._stubs["retrieve_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/RetrieveAuthorizationCode", + request_serializer=domains.RetrieveAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["retrieve_authorization_code"] + + @property + def reset_authorization_code( + self, + ) -> Callable[ + [domains.ResetAuthorizationCodeRequest], Awaitable[domains.AuthorizationCode] + ]: + r"""Return a callable for the reset authorization code method over gRPC. + + Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.ResetAuthorizationCodeRequest], + Awaitable[~.AuthorizationCode]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_authorization_code" not in self._stubs: + self._stubs["reset_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1.Domains/ResetAuthorizationCode", + request_serializer=domains.ResetAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["reset_authorization_code"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("DomainsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/rest.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/rest.py new file mode 100644 index 000000000000..65bffeb081ee --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/rest.py @@ -0,0 +1,2251 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import dataclasses
+import json  # type: ignore
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import (
+    gapic_v1,
+    operations_v1,
+    path_template,
+    rest_helpers,
+    rest_streaming,
+)
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+from google.protobuf import json_format
+import grpc  # type: ignore
+from requests import __version__ as requests_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+
+from google.longrunning import operations_pb2  # type: ignore
+
+from google.cloud.domains_v1.types import domains
+
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+from .base import DomainsTransport
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=requests_version,
+)
+
+
+class DomainsRestInterceptor:
+    """Interceptor for Domains.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the DomainsRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomDomainsInterceptor(DomainsRestInterceptor):
+            def pre_configure_contact_settings(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_configure_contact_settings(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_configure_dns_settings(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_configure_dns_settings(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_configure_management_settings(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_configure_management_settings(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_registration(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_registration(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_export_registration(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_export_registration(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_registration(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_registration(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_registrations(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_registrations(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_register_domain(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_register_domain(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_reset_authorization_code(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_reset_authorization_code(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_retrieve_authorization_code(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_retrieve_authorization_code(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_retrieve_register_parameters(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_retrieve_register_parameters(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_retrieve_transfer_parameters(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_retrieve_transfer_parameters(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_search_domains(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_search_domains(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_transfer_domain(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_transfer_domain(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_registration(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_registration(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = DomainsRestTransport(interceptor=MyCustomDomainsInterceptor())
+        client = DomainsClient(transport=transport)
+
+
+    """
+
+    def pre_configure_contact_settings(
+        self,
+        request: domains.ConfigureContactSettingsRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[domains.ConfigureContactSettingsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for configure_contact_settings
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the Domains server.
+        """
+        return request, metadata
+
+    def post_configure_contact_settings(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for configure_contact_settings
+
+        Override in a subclass to manipulate the response
+        after it is returned by the Domains server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_configure_dns_settings(
+        self,
+        request: domains.ConfigureDnsSettingsRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[domains.ConfigureDnsSettingsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for configure_dns_settings
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the Domains server.
+        """
+        return request, metadata
+
+    def post_configure_dns_settings(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for configure_dns_settings
+
+        Override in a subclass to manipulate the response
+        after it is returned by the Domains server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_configure_management_settings(
+        self,
+        request: domains.ConfigureManagementSettingsRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[domains.ConfigureManagementSettingsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for configure_management_settings
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the Domains server.
+        """
+        return request, metadata
+
+    def post_configure_management_settings(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for configure_management_settings
+
+        Override in a subclass to manipulate the response
+        after it is returned by the Domains server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_registration(
+        self,
+        request: domains.DeleteRegistrationRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[domains.DeleteRegistrationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_registration
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the Domains server.
+        """
+        return request, metadata
+
+    def post_delete_registration(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for delete_registration
+
+        Override in a subclass to manipulate the response
+        after it is returned by the Domains server but before
+        it is returned to user code.
+ """ + return response + + def pre_export_registration( + self, + request: domains.ExportRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ExportRegistrationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_export_registration( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_registration + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_get_registration( + self, + request: domains.GetRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.GetRegistrationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_get_registration( + self, response: domains.Registration + ) -> domains.Registration: + """Post-rpc interceptor for get_registration + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_list_registrations( + self, + request: domains.ListRegistrationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ListRegistrationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_registrations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_list_registrations( + self, response: domains.ListRegistrationsResponse + ) -> domains.ListRegistrationsResponse: + """Post-rpc interceptor for list_registrations + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_register_domain( + self, + request: domains.RegisterDomainRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RegisterDomainRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for register_domain + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_register_domain( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for register_domain + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_reset_authorization_code( + self, + request: domains.ResetAuthorizationCodeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ResetAuthorizationCodeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reset_authorization_code + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. 
+ """ + return request, metadata + + def post_reset_authorization_code( + self, response: domains.AuthorizationCode + ) -> domains.AuthorizationCode: + """Post-rpc interceptor for reset_authorization_code + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_retrieve_authorization_code( + self, + request: domains.RetrieveAuthorizationCodeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RetrieveAuthorizationCodeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retrieve_authorization_code + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_retrieve_authorization_code( + self, response: domains.AuthorizationCode + ) -> domains.AuthorizationCode: + """Post-rpc interceptor for retrieve_authorization_code + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_retrieve_register_parameters( + self, + request: domains.RetrieveRegisterParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RetrieveRegisterParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retrieve_register_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_retrieve_register_parameters( + self, response: domains.RetrieveRegisterParametersResponse + ) -> domains.RetrieveRegisterParametersResponse: + """Post-rpc interceptor for retrieve_register_parameters + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_retrieve_transfer_parameters( + self, + request: domains.RetrieveTransferParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RetrieveTransferParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retrieve_transfer_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_retrieve_transfer_parameters( + self, response: domains.RetrieveTransferParametersResponse + ) -> domains.RetrieveTransferParametersResponse: + """Post-rpc interceptor for retrieve_transfer_parameters + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_search_domains( + self, request: domains.SearchDomainsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[domains.SearchDomainsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_domains + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_search_domains( + self, response: domains.SearchDomainsResponse + ) -> domains.SearchDomainsResponse: + """Post-rpc interceptor for search_domains + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. 
+ """ + return response + + def pre_transfer_domain( + self, + request: domains.TransferDomainRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.TransferDomainRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for transfer_domain + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_transfer_domain( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for transfer_domain + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_update_registration( + self, + request: domains.UpdateRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.UpdateRegistrationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_update_registration( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_registration + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DomainsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DomainsRestInterceptor + + +class DomainsRestTransport(DomainsTransport): + """REST backend transport for Domains. + + The Cloud Domains API enables management and configuration of + domain names. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "domains.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DomainsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DomainsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _ConfigureContactSettings(DomainsRestStub):
+        def __hash__(self):
+            return hash("ConfigureContactSettings")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: domains.ConfigureContactSettingsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the configure contact
+            settings method over HTTP.
+
+            Args:
+                request (~.domains.ConfigureContactSettingsRequest):
+                    The request object. Request for the ``ConfigureContactSettings`` method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{registration=projects/*/locations/*/registrations/*}:configureContactSettings", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_configure_contact_settings( + request, metadata + ) + pb_request = domains.ConfigureContactSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_configure_contact_settings(resp) + return resp + + class _ConfigureDnsSettings(DomainsRestStub): + def __hash__(self): + return hash("ConfigureDnsSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ConfigureDnsSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the configure dns settings method over HTTP. + + Args: + request (~.domains.ConfigureDnsSettingsRequest): + The request object. Request for the ``ConfigureDnsSettings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
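+
+                Note: the public client wraps this raw proto in a
+                ``google.api_core.operation.Operation`` future. A typical
+                caller sketch, assuming the standard generated surface::
+
+                    op = client.configure_dns_settings(request=request)
+                    registration = op.result()  # block until the LRO completes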
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{registration=projects/*/locations/*/registrations/*}:configureDnsSettings", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_configure_dns_settings( + request, metadata + ) + pb_request = domains.ConfigureDnsSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_configure_dns_settings(resp) + return resp + + class _ConfigureManagementSettings(DomainsRestStub): + def __hash__(self): + return hash("ConfigureManagementSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ConfigureManagementSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the configure management + settings method over HTTP. + + Args: + request (~.domains.ConfigureManagementSettingsRequest): + The request object. Request for the ``ConfigureManagementSettings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{registration=projects/*/locations/*/registrations/*}:configureManagementSettings", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_configure_management_settings( + request, metadata + ) + pb_request = domains.ConfigureManagementSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_configure_management_settings(resp) + return resp + + class _DeleteRegistration(DomainsRestStub): + def __hash__(self): + return hash("DeleteRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.DeleteRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete registration method over HTTP. + + Args: + request (~.domains.DeleteRegistrationRequest): + The request object. Request for the ``DeleteRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/registrations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_registration( + request, metadata + ) + pb_request = domains.DeleteRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_registration(resp) + return resp + + class _ExportRegistration(DomainsRestStub): + def __hash__(self): + return hash("ExportRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ExportRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export registration method over HTTP. + + Args: + request (~.domains.ExportRegistrationRequest): + The request object. Request for the ``ExportRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/registrations/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_registration( + request, metadata + ) + pb_request = domains.ExportRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_registration(resp) + return resp + + class _GetRegistration(DomainsRestStub): + def __hash__(self): + return hash("GetRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.GetRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.Registration: + r"""Call the get registration method over HTTP. + + Args: + request (~.domains.GetRegistrationRequest): + The request object. Request for the ``GetRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.Registration: + The ``Registration`` resource facilitates managing and + configuring domain name registrations. + + There are several ways to create a new ``Registration`` + resource: + + To create a new ``Registration`` resource, find a + suitable domain name by calling the ``SearchDomains`` + method with a query to see available domain name + options. After choosing a name, call + ``RetrieveRegisterParameters`` to ensure availability + and obtain information like pricing, which is needed to + build a call to ``RegisterDomain``. + + Another way to create a new ``Registration`` is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the domain + for transfer and retrieve the domain's transfer + authorization code. 
Then call + ``RetrieveTransferParameters`` to confirm that the + domain is unlocked and to get values needed to build a + call to ``TransferDomain``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/registrations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_registration( + request, metadata + ) + pb_request = domains.GetRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.Registration() + pb_resp = domains.Registration.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_registration(resp) + return resp + + class _ListRegistrations(DomainsRestStub): + def __hash__(self): + return hash("ListRegistrations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ListRegistrationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.ListRegistrationsResponse: + r"""Call the list registrations method over HTTP. + + Args: + request (~.domains.ListRegistrationsRequest): + The request object. Request for the ``ListRegistrations`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.ListRegistrationsResponse: + Response for the ``ListRegistrations`` method. 
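+
+                Through the public client the response is paged transparently.
+                A sketch, assuming the REST transport added in this file and a
+                placeholder project path::
+
+                    from google.cloud import domains_v1
+
+                    client = domains_v1.DomainsClient(transport="rest")
+                    for registration in client.list_registrations(
+                        parent="projects/my-project/locations/global"
+                    ):
+                        print(registration.domain_name)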
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/registrations", + }, + ] + request, metadata = self._interceptor.pre_list_registrations( + request, metadata + ) + pb_request = domains.ListRegistrationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.ListRegistrationsResponse() + pb_resp = domains.ListRegistrationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_registrations(resp) + return resp + + class _RegisterDomain(DomainsRestStub): + def __hash__(self): + return hash("RegisterDomain") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RegisterDomainRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the register domain method over HTTP. + + Args: + request (~.domains.RegisterDomainRequest): + The request object. Request for the ``RegisterDomain`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/registrations:register", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_register_domain(request, metadata) + pb_request = domains.RegisterDomainRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_register_domain(resp) + return resp + + class _ResetAuthorizationCode(DomainsRestStub): + def __hash__(self): + return hash("ResetAuthorizationCode") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ResetAuthorizationCodeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Call the reset authorization code method over HTTP. + + Args: + request (~.domains.ResetAuthorizationCodeRequest): + The request object. Request for the ``ResetAuthorizationCode`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.AuthorizationCode: + Defines an authorization code. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{registration=projects/*/locations/*/registrations/*}:resetAuthorizationCode", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_reset_authorization_code( + request, metadata + ) + pb_request = domains.ResetAuthorizationCodeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.AuthorizationCode() + pb_resp = domains.AuthorizationCode.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reset_authorization_code(resp) + return resp + + class _RetrieveAuthorizationCode(DomainsRestStub): + def __hash__(self): + return hash("RetrieveAuthorizationCode") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RetrieveAuthorizationCodeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Call the retrieve authorization + code method over HTTP. + + Args: + request (~.domains.RetrieveAuthorizationCodeRequest): + The request object. Request for the ``RetrieveAuthorizationCode`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.AuthorizationCode: + Defines an authorization code. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{registration=projects/*/locations/*/registrations/*}:retrieveAuthorizationCode", + }, + ] + request, metadata = self._interceptor.pre_retrieve_authorization_code( + request, metadata + ) + pb_request = domains.RetrieveAuthorizationCodeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.AuthorizationCode() + pb_resp = domains.AuthorizationCode.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_authorization_code(resp) + return resp + + class _RetrieveRegisterParameters(DomainsRestStub): + def __hash__(self): + return hash("RetrieveRegisterParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "domainName": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RetrieveRegisterParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveRegisterParametersResponse: + r"""Call the retrieve register + parameters method over HTTP. + + Args: + request (~.domains.RetrieveRegisterParametersRequest): + The request object. Request for the ``RetrieveRegisterParameters`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.RetrieveRegisterParametersResponse: + Response for the ``RetrieveRegisterParameters`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{location=projects/*/locations/*}/registrations:retrieveRegisterParameters", + }, + ] + request, metadata = self._interceptor.pre_retrieve_register_parameters( + request, metadata + ) + pb_request = domains.RetrieveRegisterParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.RetrieveRegisterParametersResponse() + pb_resp = domains.RetrieveRegisterParametersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_register_parameters(resp) + return resp + + class _RetrieveTransferParameters(DomainsRestStub): + def __hash__(self): + return hash("RetrieveTransferParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "domainName": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RetrieveTransferParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveTransferParametersResponse: + r"""Call the retrieve transfer + parameters method over HTTP. + + Args: + request (~.domains.RetrieveTransferParametersRequest): + The request object. Request for the ``RetrieveTransferParameters`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.RetrieveTransferParametersResponse: + Response for the ``RetrieveTransferParameters`` method. 
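+
+            A hedged sketch of the usual call path through the public
+            client, used to confirm a domain is unlocked before a transfer
+            (project and domain names are hypothetical)::
+
+                from google.cloud import domains_v1
+
+                client = domains_v1.DomainsClient(transport="rest")
+                response = client.retrieve_transfer_parameters(
+                    location="projects/my-project/locations/global",
+                    domain_name="example.com",
+                )
+                lock = response.transfer_parameters.transfer_lock_state
+                if lock != domains_v1.TransferLockState.UNLOCKED:
+                    print("Unlock the domain at its current registrar first.")
+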
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{location=projects/*/locations/*}/registrations:retrieveTransferParameters", + }, + ] + request, metadata = self._interceptor.pre_retrieve_transfer_parameters( + request, metadata + ) + pb_request = domains.RetrieveTransferParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.RetrieveTransferParametersResponse() + pb_resp = domains.RetrieveTransferParametersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_transfer_parameters(resp) + return resp + + class _SearchDomains(DomainsRestStub): + def __hash__(self): + return hash("SearchDomains") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "query": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.SearchDomainsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.SearchDomainsResponse: + r"""Call the search domains method over HTTP. + + Args: + request (~.domains.SearchDomainsRequest): + The request object. Request for the ``SearchDomains`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.SearchDomainsResponse: + Response for the ``SearchDomains`` method. 
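+
+            For illustration (not generated reference material), searching
+            is the usual first step of the registration flow described on
+            the ``Registration`` resource; the names are hypothetical::
+
+                from google.cloud import domains_v1
+
+                client = domains_v1.DomainsClient(transport="rest")
+                response = client.search_domains(
+                    location="projects/my-project/locations/global",
+                    query="example",
+                )
+                for params in response.register_parameters:
+                    print(params.domain_name, params.availability)
+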
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{location=projects/*/locations/*}/registrations:searchDomains", + }, + ] + request, metadata = self._interceptor.pre_search_domains(request, metadata) + pb_request = domains.SearchDomainsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.SearchDomainsResponse() + pb_resp = domains.SearchDomainsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_domains(resp) + return resp + + class _TransferDomain(DomainsRestStub): + def __hash__(self): + return hash("TransferDomain") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.TransferDomainRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the transfer domain method over HTTP. + + Args: + request (~.domains.TransferDomainRequest): + The request object. Request for the ``TransferDomain`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
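+
+            A minimal, hedged sketch of driving a transfer through the
+            public client; the names are hypothetical, and the empty
+            ``ContactSettings`` shown here must be fully populated in
+            practice::
+
+                from google.cloud import domains_v1
+
+                client = domains_v1.DomainsClient(transport="rest")
+                parent = "projects/my-project/locations/global"
+                params = client.retrieve_transfer_parameters(
+                    location=parent,
+                    domain_name="example.com",
+                ).transfer_parameters
+                operation = client.transfer_domain(
+                    parent=parent,
+                    registration=domains_v1.Registration(
+                        domain_name="example.com",
+                        # Fill in registrant/admin/technical contacts.
+                        contact_settings=domains_v1.ContactSettings(),
+                    ),
+                    yearly_price=params.yearly_price,
+                    authorization_code=domains_v1.AuthorizationCode(
+                        code="code-from-current-registrar",
+                    ),
+                )
+                operation.result()  # blocks until the LRO completes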
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/registrations:transfer", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_transfer_domain(request, metadata) + pb_request = domains.TransferDomainRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_transfer_domain(resp) + return resp + + class _UpdateRegistration(DomainsRestStub): + def __hash__(self): + return hash("UpdateRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.UpdateRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update registration method over HTTP. + + Args: + request (~.domains.UpdateRegistrationRequest): + The request object. Request for the ``UpdateRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
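+
+            An illustrative sketch only: ``UpdateRegistration`` covers
+            fields like ``labels`` (management, DNS, and contact settings
+            use the ``Configure*`` methods instead), so a typical call
+            looks like this, with hypothetical names::
+
+                from google.cloud import domains_v1
+                from google.protobuf import field_mask_pb2
+
+                client = domains_v1.DomainsClient(transport="rest")
+                operation = client.update_registration(
+                    registration=domains_v1.Registration(
+                        name="projects/my-project/locations/global/registrations/example.com",
+                        labels={"team": "dns"},
+                    ),
+                    update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+                )
+                operation.result()  # wait for the long-running operation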
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{registration.name=projects/*/locations/*/registrations/*}", + "body": "registration", + }, + ] + request, metadata = self._interceptor.pre_update_registration( + request, metadata + ) + pb_request = domains.UpdateRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_registration(resp) + return resp + + @property + def configure_contact_settings( + self, + ) -> Callable[[domains.ConfigureContactSettingsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ConfigureContactSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def configure_dns_settings( + self, + ) -> Callable[[domains.ConfigureDnsSettingsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ConfigureDnsSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def configure_management_settings( + self, + ) -> Callable[ + [domains.ConfigureManagementSettingsRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ConfigureManagementSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_registration( + self, + ) -> Callable[[domains.DeleteRegistrationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_registration( + self, + ) -> Callable[[domains.ExportRegistrationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExportRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_registration( + self, + ) -> Callable[[domains.GetRegistrationRequest], domains.Registration]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], domains.ListRegistrationsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRegistrations(self._session, self._host, self._interceptor) # type: ignore + + @property + def register_domain( + self, + ) -> Callable[[domains.RegisterDomainRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RegisterDomain(self._session, self._host, self._interceptor) # type: ignore + + @property + def reset_authorization_code( + self, + ) -> Callable[[domains.ResetAuthorizationCodeRequest], domains.AuthorizationCode]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResetAuthorizationCode(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], domains.AuthorizationCode + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveAuthorizationCode(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + domains.RetrieveRegisterParametersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveRegisterParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + domains.RetrieveTransferParametersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveTransferParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_domains( + self, + ) -> Callable[[domains.SearchDomainsRequest], domains.SearchDomainsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchDomains(self._session, self._host, self._interceptor) # type: ignore + + @property + def transfer_domain( + self, + ) -> Callable[[domains.TransferDomainRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TransferDomain(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_registration( + self, + ) -> Callable[[domains.UpdateRegistrationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DomainsRestTransport",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/types/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1/types/__init__.py new file mode 100644 index 000000000000..1afe08dc5219 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1/types/__init__.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .domains import ( + AuthorizationCode, + ConfigureContactSettingsRequest, + ConfigureDnsSettingsRequest, + ConfigureManagementSettingsRequest, + ContactNotice, + ContactPrivacy, + ContactSettings, + DeleteRegistrationRequest, + DnsSettings, + DomainNotice, + ExportRegistrationRequest, + GetRegistrationRequest, + ListRegistrationsRequest, + ListRegistrationsResponse, + ManagementSettings, + OperationMetadata, + RegisterDomainRequest, + RegisterParameters, + Registration, + ResetAuthorizationCodeRequest, + RetrieveAuthorizationCodeRequest, + RetrieveRegisterParametersRequest, + RetrieveRegisterParametersResponse, + RetrieveTransferParametersRequest, + RetrieveTransferParametersResponse, + SearchDomainsRequest, + SearchDomainsResponse, + TransferDomainRequest, + TransferLockState, + TransferParameters, + UpdateRegistrationRequest, +) + +__all__ = ( + "AuthorizationCode", + "ConfigureContactSettingsRequest", + "ConfigureDnsSettingsRequest", + "ConfigureManagementSettingsRequest", + "ContactSettings", + "DeleteRegistrationRequest", + "DnsSettings", + "ExportRegistrationRequest", + "GetRegistrationRequest", + "ListRegistrationsRequest", + "ListRegistrationsResponse", + "ManagementSettings", + "OperationMetadata", + "RegisterDomainRequest", + "RegisterParameters", + "Registration", + "ResetAuthorizationCodeRequest", + "RetrieveAuthorizationCodeRequest", + "RetrieveRegisterParametersRequest", + "RetrieveRegisterParametersResponse", + "RetrieveTransferParametersRequest", + "RetrieveTransferParametersResponse", + "SearchDomainsRequest", + "SearchDomainsResponse", + "TransferDomainRequest", + "TransferParameters", + "UpdateRegistrationRequest", + "ContactNotice", + "ContactPrivacy", + "DomainNotice", + "TransferLockState", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/types/domains.py b/packages/google-cloud-domains/google/cloud/domains_v1/types/domains.py new file mode 100644 index 000000000000..812976b3d3fb --- /dev/null 
+++ b/packages/google-cloud-domains/google/cloud/domains_v1/types/domains.py @@ -0,0 +1,1526 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import money_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.domains.v1", + manifest={ + "ContactPrivacy", + "DomainNotice", + "ContactNotice", + "TransferLockState", + "Registration", + "ManagementSettings", + "DnsSettings", + "ContactSettings", + "SearchDomainsRequest", + "SearchDomainsResponse", + "RetrieveRegisterParametersRequest", + "RetrieveRegisterParametersResponse", + "RegisterDomainRequest", + "RetrieveTransferParametersRequest", + "RetrieveTransferParametersResponse", + "TransferDomainRequest", + "ListRegistrationsRequest", + "ListRegistrationsResponse", + "GetRegistrationRequest", + "UpdateRegistrationRequest", + "ConfigureManagementSettingsRequest", + "ConfigureDnsSettingsRequest", + "ConfigureContactSettingsRequest", + "ExportRegistrationRequest", + "DeleteRegistrationRequest", + "RetrieveAuthorizationCodeRequest", + "ResetAuthorizationCodeRequest", + "RegisterParameters", + "TransferParameters", + "AuthorizationCode", + "OperationMetadata", + }, +) + + +class ContactPrivacy(proto.Enum): + r"""Defines a set of possible contact privacy settings for a + ``Registration``. + + `ICANN `__ maintains the WHOIS database, a + publicly accessible mapping from domain name to contact information, + and requires that each domain name have an entry. Choose from these + options to control how much information in your ``ContactSettings`` + is published. + + Values: + CONTACT_PRIVACY_UNSPECIFIED (0): + The contact privacy settings are undefined. + PUBLIC_CONTACT_DATA (1): + All the data from ``ContactSettings`` is publicly available. + When setting this option, you must also provide a + ``PUBLIC_CONTACT_DATA_ACKNOWLEDGEMENT`` in the + ``contact_notices`` field of the request. + PRIVATE_CONTACT_DATA (2): + None of the data from ``ContactSettings`` is publicly + available. Instead, proxy contact data is published for your + domain. Email sent to the proxy email address is forwarded + to the registrant's email address. Cloud Domains provides + this privacy proxy service at no additional cost. + REDACTED_CONTACT_DATA (3): + Some data from ``ContactSettings`` is publicly available. + The actual information redacted depends on the domain. For + details, see `the registration privacy + article `__. + """ + CONTACT_PRIVACY_UNSPECIFIED = 0 + PUBLIC_CONTACT_DATA = 1 + PRIVATE_CONTACT_DATA = 2 + REDACTED_CONTACT_DATA = 3 + + +class DomainNotice(proto.Enum): + r"""Notices about special properties of certain domains. 
+ + Values: + DOMAIN_NOTICE_UNSPECIFIED (0): + The notice is undefined. + HSTS_PRELOADED (1): + Indicates that the domain is preloaded on the HTTP Strict + Transport Security list in browsers. Serving a website on + such domain requires an SSL certificate. For details, see + `how to get an SSL + certificate `__. + """ + DOMAIN_NOTICE_UNSPECIFIED = 0 + HSTS_PRELOADED = 1 + + +class ContactNotice(proto.Enum): + r"""Notices related to contact information. + + Values: + CONTACT_NOTICE_UNSPECIFIED (0): + The notice is undefined. + PUBLIC_CONTACT_DATA_ACKNOWLEDGEMENT (1): + Required when setting the ``privacy`` field of + ``ContactSettings`` to ``PUBLIC_CONTACT_DATA``, which + exposes contact data publicly. + """ + CONTACT_NOTICE_UNSPECIFIED = 0 + PUBLIC_CONTACT_DATA_ACKNOWLEDGEMENT = 1 + + +class TransferLockState(proto.Enum): + r"""Possible states of a ``Registration``'s transfer lock. + + Values: + TRANSFER_LOCK_STATE_UNSPECIFIED (0): + The state is unspecified. + UNLOCKED (1): + The domain is unlocked and can be transferred + to another registrar. + LOCKED (2): + The domain is locked and cannot be + transferred to another registrar. + """ + TRANSFER_LOCK_STATE_UNSPECIFIED = 0 + UNLOCKED = 1 + LOCKED = 2 + + +class Registration(proto.Message): + r"""The ``Registration`` resource facilitates managing and configuring + domain name registrations. + + There are several ways to create a new ``Registration`` resource: + + To create a new ``Registration`` resource, find a suitable domain + name by calling the ``SearchDomains`` method with a query to see + available domain name options. After choosing a name, call + ``RetrieveRegisterParameters`` to ensure availability and obtain + information like pricing, which is needed to build a call to + ``RegisterDomain``. + + Another way to create a new ``Registration`` is to transfer an + existing domain from another registrar. First, go to the current + registrar to unlock the domain for transfer and retrieve the + domain's transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to + ``TransferDomain``. + + Attributes: + name (str): + Output only. Name of the ``Registration`` resource, in the + format + ``projects/*/locations/*/registrations/``. + domain_name (str): + Required. Immutable. The domain name. Unicode + domain names must be expressed in Punycode + format. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the ``Registration`` + resource. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The expiration timestamp of the + ``Registration``. + state (google.cloud.domains_v1.types.Registration.State): + Output only. The state of the ``Registration`` + issues (MutableSequence[google.cloud.domains_v1.types.Registration.Issue]): + Output only. The set of issues with the ``Registration`` + that require attention. + labels (MutableMapping[str, str]): + Set of labels associated with the ``Registration``. + management_settings (google.cloud.domains_v1.types.ManagementSettings): + Settings for management of the ``Registration``, including + renewal, billing, and transfer. You cannot update these with + the ``UpdateRegistration`` method. To update these settings, + use the ``ConfigureManagementSettings`` method. + dns_settings (google.cloud.domains_v1.types.DnsSettings): + Settings controlling the DNS configuration of the + ``Registration``. 
You cannot update these with the + ``UpdateRegistration`` method. To update these settings, use + the ``ConfigureDnsSettings`` method. + contact_settings (google.cloud.domains_v1.types.ContactSettings): + Required. Settings for contact information linked to the + ``Registration``. You cannot update these with the + ``UpdateRegistration`` method. To update these settings, use + the ``ConfigureContactSettings`` method. + pending_contact_settings (google.cloud.domains_v1.types.ContactSettings): + Output only. Pending contact settings for the + ``Registration``. Updates to the ``contact_settings`` field + that change its ``registrant_contact`` or ``privacy`` fields + require email confirmation by the ``registrant_contact`` + before taking effect. This field is set only if there are + pending updates to the ``contact_settings`` that have not + been confirmed. To confirm the changes, the + ``registrant_contact`` must follow the instructions in the + email they receive. + supported_privacy (MutableSequence[google.cloud.domains_v1.types.ContactPrivacy]): + Output only. Set of options for the + ``contact_settings.privacy`` field that this + ``Registration`` supports. + """ + + class State(proto.Enum): + r"""Possible states of a ``Registration``. + + Values: + STATE_UNSPECIFIED (0): + The state is undefined. + REGISTRATION_PENDING (1): + The domain is being registered. + REGISTRATION_FAILED (2): + The domain registration failed. You can + delete resources in this state to allow + registration to be retried. + TRANSFER_PENDING (3): + The domain is being transferred from another + registrar to Cloud Domains. + TRANSFER_FAILED (4): + The attempt to transfer the domain from + another registrar to Cloud Domains failed. You + can delete resources in this state and retry the + transfer. + ACTIVE (6): + The domain is registered and operational. The + domain renews automatically as long as it + remains in this state. + SUSPENDED (7): + The domain is suspended and inoperative. For more details, + see the ``issues`` field. + EXPORTED (8): + The domain is no longer managed with Cloud Domains. It may + have been transferred to another registrar or exported for + management in `Google Domains `__. + You can no longer update it with this API, and information + shown about it may be stale. Domains in this state are not + automatically renewed by Cloud Domains. + """ + STATE_UNSPECIFIED = 0 + REGISTRATION_PENDING = 1 + REGISTRATION_FAILED = 2 + TRANSFER_PENDING = 3 + TRANSFER_FAILED = 4 + ACTIVE = 6 + SUSPENDED = 7 + EXPORTED = 8 + + class Issue(proto.Enum): + r"""Possible issues with a ``Registration`` that require attention. + + Values: + ISSUE_UNSPECIFIED (0): + The issue is undefined. + CONTACT_SUPPORT (1): + Contact the Cloud Support team to resolve a + problem with this domain. + UNVERIFIED_EMAIL (2): + `ICANN `__ requires verification of the + email address in the ``Registration``'s + ``contact_settings.registrant_contact`` field. To verify the + email address, follow the instructions in the email the + ``registrant_contact`` receives following registration. If + you do not complete email verification within 15 days of + registration, the domain is suspended. To resend the + verification email, call ConfigureContactSettings and + provide the current ``registrant_contact.email``. 
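+
+                As a hedged illustration (names are hypothetical and the
+                field mask path is an assumption), resending the
+                verification email through the public client can look
+                like::
+
+                    from google.cloud import domains_v1
+                    from google.protobuf import field_mask_pb2
+
+                    client = domains_v1.DomainsClient()
+                    operation = client.configure_contact_settings(
+                        registration="projects/my-project/locations/global/registrations/example.com",
+                        contact_settings=domains_v1.ContactSettings(
+                            registrant_contact=domains_v1.ContactSettings.Contact(
+                                # Provide the current registrant email.
+                                email="admin@example.com",
+                            ),
+                        ),
+                        update_mask=field_mask_pb2.FieldMask(
+                            paths=["registrant_contact.email"],
+                        ),
+                    )
+                    operation.result()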
+ """ + ISSUE_UNSPECIFIED = 0 + CONTACT_SUPPORT = 1 + UNVERIFIED_EMAIL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + domain_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + issues: MutableSequence[Issue] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=Issue, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + management_settings: "ManagementSettings" = proto.Field( + proto.MESSAGE, + number=10, + message="ManagementSettings", + ) + dns_settings: "DnsSettings" = proto.Field( + proto.MESSAGE, + number=11, + message="DnsSettings", + ) + contact_settings: "ContactSettings" = proto.Field( + proto.MESSAGE, + number=12, + message="ContactSettings", + ) + pending_contact_settings: "ContactSettings" = proto.Field( + proto.MESSAGE, + number=13, + message="ContactSettings", + ) + supported_privacy: MutableSequence["ContactPrivacy"] = proto.RepeatedField( + proto.ENUM, + number=14, + enum="ContactPrivacy", + ) + + +class ManagementSettings(proto.Message): + r"""Defines renewal, billing, and transfer settings for a + ``Registration``. + + Attributes: + renewal_method (google.cloud.domains_v1.types.ManagementSettings.RenewalMethod): + Output only. The renewal method for this ``Registration``. + transfer_lock_state (google.cloud.domains_v1.types.TransferLockState): + Controls whether the domain can be + transferred to another registrar. + """ + + class RenewalMethod(proto.Enum): + r"""Defines how the ``Registration`` is renewed. + + Values: + RENEWAL_METHOD_UNSPECIFIED (0): + The renewal method is undefined. + AUTOMATIC_RENEWAL (1): + The domain is automatically renewed each year . + + To disable automatic renewals, delete the resource by + calling ``DeleteRegistration`` or export it by calling + ``ExportRegistration``. + MANUAL_RENEWAL (2): + The domain must be explicitly renewed each year before its + ``expire_time``. This option is only available when the + ``Registration`` is in state ``EXPORTED``. + + To manage the domain's current billing and renewal settings, + go to `Google Domains `__. + """ + RENEWAL_METHOD_UNSPECIFIED = 0 + AUTOMATIC_RENEWAL = 1 + MANUAL_RENEWAL = 2 + + renewal_method: RenewalMethod = proto.Field( + proto.ENUM, + number=3, + enum=RenewalMethod, + ) + transfer_lock_state: "TransferLockState" = proto.Field( + proto.ENUM, + number=4, + enum="TransferLockState", + ) + + +class DnsSettings(proto.Message): + r"""Defines the DNS configuration of a ``Registration``, including name + servers, DNSSEC, and glue records. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + custom_dns (google.cloud.domains_v1.types.DnsSettings.CustomDns): + An arbitrary DNS provider identified by its + name servers. + + This field is a member of `oneof`_ ``dns_provider``. + google_domains_dns (google.cloud.domains_v1.types.DnsSettings.GoogleDomainsDns): + The free DNS zone provided by `Google + Domains `__. 
+ + This field is a member of `oneof`_ ``dns_provider``. + glue_records (MutableSequence[google.cloud.domains_v1.types.DnsSettings.GlueRecord]): + The list of glue records for this ``Registration``. Commonly + empty. + """ + + class DsState(proto.Enum): + r"""The publication state of DS records for a ``Registration``. + + Values: + DS_STATE_UNSPECIFIED (0): + DS state is unspecified. + DS_RECORDS_UNPUBLISHED (1): + DNSSEC is disabled for this domain. No DS + records for this domain are published in the + parent DNS zone. + DS_RECORDS_PUBLISHED (2): + DNSSEC is enabled for this domain. Appropriate DS records + for this domain are published in the parent DNS zone. This + option is valid only if the DNS zone referenced in the + ``Registration``'s ``dns_provider`` field is already + DNSSEC-signed. + """ + DS_STATE_UNSPECIFIED = 0 + DS_RECORDS_UNPUBLISHED = 1 + DS_RECORDS_PUBLISHED = 2 + + class CustomDns(proto.Message): + r"""Configuration for an arbitrary DNS provider. + + Attributes: + name_servers (MutableSequence[str]): + Required. A list of name servers that store + the DNS zone for this domain. Each name server + is a domain name, with Unicode domain names + expressed in Punycode format. + ds_records (MutableSequence[google.cloud.domains_v1.types.DnsSettings.DsRecord]): + The list of DS records for this domain, which + are used to enable DNSSEC. The domain's DNS + provider can provide the values to set here. If + this field is empty, DNSSEC is disabled. + """ + + name_servers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + ds_records: MutableSequence["DnsSettings.DsRecord"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="DnsSettings.DsRecord", + ) + + class GoogleDomainsDns(proto.Message): + r"""Configuration for using the free DNS zone provided by Google Domains + as a ``Registration``'s ``dns_provider``. You cannot configure the + DNS zone itself using the API. To configure the DNS zone, go to + `Google Domains `__. + + Attributes: + name_servers (MutableSequence[str]): + Output only. A list of name servers that + store the DNS zone for this domain. Each name + server is a domain name, with Unicode domain + names expressed in Punycode format. This field + is automatically populated with the name servers + assigned to the Google Domains DNS zone. + ds_state (google.cloud.domains_v1.types.DnsSettings.DsState): + Required. The state of DS records for this + domain. Used to enable or disable automatic + DNSSEC. + ds_records (MutableSequence[google.cloud.domains_v1.types.DnsSettings.DsRecord]): + Output only. The list of DS records published for this + domain. The list is automatically populated when + ``ds_state`` is ``DS_RECORDS_PUBLISHED``, otherwise it + remains empty. + """ + + name_servers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + ds_state: "DnsSettings.DsState" = proto.Field( + proto.ENUM, + number=2, + enum="DnsSettings.DsState", + ) + ds_records: MutableSequence["DnsSettings.DsRecord"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DnsSettings.DsRecord", + ) + + class DsRecord(proto.Message): + r"""Defines a Delegation Signer (DS) record, which is needed to + enable DNSSEC for a domain. It contains a digest (hash) of a + DNSKEY record that must be present in the domain's DNS zone. + + Attributes: + key_tag (int): + The key tag of the record. Must be set in + range 0 -- 65535. 
+ algorithm (google.cloud.domains_v1.types.DnsSettings.DsRecord.Algorithm): + The algorithm used to generate the referenced + DNSKEY. + digest_type (google.cloud.domains_v1.types.DnsSettings.DsRecord.DigestType): + The hash function used to generate the digest + of the referenced DNSKEY. + digest (str): + The digest generated from the referenced + DNSKEY. + """ + + class Algorithm(proto.Enum): + r"""List of algorithms used to create a DNSKEY. Certain + algorithms are not supported for particular domains. + + Values: + ALGORITHM_UNSPECIFIED (0): + The algorithm is unspecified. + RSAMD5 (1): + RSA/MD5. Cannot be used for new deployments. + DH (2): + Diffie-Hellman. Cannot be used for new + deployments. + DSA (3): + DSA/SHA1. Not recommended for new + deployments. + ECC (4): + ECC. Not recommended for new deployments. + RSASHA1 (5): + RSA/SHA-1. Not recommended for new + deployments. + DSANSEC3SHA1 (6): + DSA-NSEC3-SHA1. Not recommended for new + deployments. + RSASHA1NSEC3SHA1 (7): + RSA/SHA1-NSEC3-SHA1. Not recommended for new + deployments. + RSASHA256 (8): + RSA/SHA-256. + RSASHA512 (10): + RSA/SHA-512. + ECCGOST (12): + GOST R 34.10-2001. + ECDSAP256SHA256 (13): + ECDSA Curve P-256 with SHA-256. + ECDSAP384SHA384 (14): + ECDSA Curve P-384 with SHA-384. + ED25519 (15): + Ed25519. + ED448 (16): + Ed448. + INDIRECT (252): + Reserved for Indirect Keys. Cannot be used + for new deployments. + PRIVATEDNS (253): + Private algorithm. Cannot be used for new + deployments. + PRIVATEOID (254): + Private algorithm OID. Cannot be used for new + deployments. + """ + ALGORITHM_UNSPECIFIED = 0 + RSAMD5 = 1 + DH = 2 + DSA = 3 + ECC = 4 + RSASHA1 = 5 + DSANSEC3SHA1 = 6 + RSASHA1NSEC3SHA1 = 7 + RSASHA256 = 8 + RSASHA512 = 10 + ECCGOST = 12 + ECDSAP256SHA256 = 13 + ECDSAP384SHA384 = 14 + ED25519 = 15 + ED448 = 16 + INDIRECT = 252 + PRIVATEDNS = 253 + PRIVATEOID = 254 + + class DigestType(proto.Enum): + r"""List of hash functions that may have been used to generate a + digest of a DNSKEY. + + Values: + DIGEST_TYPE_UNSPECIFIED (0): + The DigestType is unspecified. + SHA1 (1): + SHA-1. Not recommended for new deployments. + SHA256 (2): + SHA-256. + GOST3411 (3): + GOST R 34.11-94. + SHA384 (4): + SHA-384. + """ + DIGEST_TYPE_UNSPECIFIED = 0 + SHA1 = 1 + SHA256 = 2 + GOST3411 = 3 + SHA384 = 4 + + key_tag: int = proto.Field( + proto.INT32, + number=1, + ) + algorithm: "DnsSettings.DsRecord.Algorithm" = proto.Field( + proto.ENUM, + number=2, + enum="DnsSettings.DsRecord.Algorithm", + ) + digest_type: "DnsSettings.DsRecord.DigestType" = proto.Field( + proto.ENUM, + number=3, + enum="DnsSettings.DsRecord.DigestType", + ) + digest: str = proto.Field( + proto.STRING, + number=4, + ) + + class GlueRecord(proto.Message): + r"""Defines a host on your domain that is a DNS name server for your + domain and/or other domains. Glue records are a way of making the IP + address of a name server known, even when it serves DNS queries for + its parent domain. For example, when ``ns.example.com`` is a name + server for ``example.com``, the host ``ns.example.com`` must have a + glue record to break the circular DNS reference. + + Attributes: + host_name (str): + Required. Domain name of the host in Punycode + format. + ipv4_addresses (MutableSequence[str]): + List of IPv4 addresses corresponding to this host in the + standard decimal format (e.g. ``198.51.100.1``). At least + one of ``ipv4_address`` and ``ipv6_address`` must be set. 
+ ipv6_addresses (MutableSequence[str]): + List of IPv6 addresses corresponding to this host in the + standard hexadecimal format (e.g. ``2001:db8::``). At least + one of ``ipv4_address`` and ``ipv6_address`` must be set. + """ + + host_name: str = proto.Field( + proto.STRING, + number=1, + ) + ipv4_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + ipv6_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + custom_dns: CustomDns = proto.Field( + proto.MESSAGE, + number=1, + oneof="dns_provider", + message=CustomDns, + ) + google_domains_dns: GoogleDomainsDns = proto.Field( + proto.MESSAGE, + number=2, + oneof="dns_provider", + message=GoogleDomainsDns, + ) + glue_records: MutableSequence[GlueRecord] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=GlueRecord, + ) + + +class ContactSettings(proto.Message): + r"""Defines the contact information associated with a ``Registration``. + + `ICANN `__ requires all domain names to have + associated contact information. The ``registrant_contact`` is + considered the domain's legal owner, and often the other contacts + are identical. + + Attributes: + privacy (google.cloud.domains_v1.types.ContactPrivacy): + Required. Privacy setting for the contacts associated with + the ``Registration``. + registrant_contact (google.cloud.domains_v1.types.ContactSettings.Contact): + Required. The registrant contact for the ``Registration``. + + *Caution: Anyone with access to this email address, phone + number, and/or postal address can take control of the + domain.* + + *Warning: For new ``Registration``\ s, the registrant + receives an email confirmation that they must complete + within 15 days to avoid domain suspension.* + admin_contact (google.cloud.domains_v1.types.ContactSettings.Contact): + Required. The administrative contact for the + ``Registration``. + technical_contact (google.cloud.domains_v1.types.ContactSettings.Contact): + Required. The technical contact for the ``Registration``. + """ + + class Contact(proto.Message): + r"""Details required for a contact associated with a ``Registration``. + + Attributes: + postal_address (google.type.postal_address_pb2.PostalAddress): + Required. Postal address of the contact. + email (str): + Required. Email address of the contact. + phone_number (str): + Required. Phone number of the contact in international + format. For example, ``"+1-800-555-0123"``. + fax_number (str): + Fax number of the contact in international format. For + example, ``"+1-800-555-0123"``. + """ + + postal_address: postal_address_pb2.PostalAddress = proto.Field( + proto.MESSAGE, + number=1, + message=postal_address_pb2.PostalAddress, + ) + email: str = proto.Field( + proto.STRING, + number=2, + ) + phone_number: str = proto.Field( + proto.STRING, + number=3, + ) + fax_number: str = proto.Field( + proto.STRING, + number=4, + ) + + privacy: "ContactPrivacy" = proto.Field( + proto.ENUM, + number=1, + enum="ContactPrivacy", + ) + registrant_contact: Contact = proto.Field( + proto.MESSAGE, + number=2, + message=Contact, + ) + admin_contact: Contact = proto.Field( + proto.MESSAGE, + number=3, + message=Contact, + ) + technical_contact: Contact = proto.Field( + proto.MESSAGE, + number=4, + message=Contact, + ) + + +class SearchDomainsRequest(proto.Message): + r"""Request for the ``SearchDomains`` method. + + Attributes: + query (str): + Required. String used to search for available + domain names. + location (str): + Required. The location. 
Must be in the format + ``projects/*/locations/*``. + """ + + query: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchDomainsResponse(proto.Message): + r"""Response for the ``SearchDomains`` method. + + Attributes: + register_parameters (MutableSequence[google.cloud.domains_v1.types.RegisterParameters]): + Results of the domain name search. + """ + + register_parameters: MutableSequence["RegisterParameters"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RegisterParameters", + ) + + +class RetrieveRegisterParametersRequest(proto.Message): + r"""Request for the ``RetrieveRegisterParameters`` method. + + Attributes: + domain_name (str): + Required. The domain name. Unicode domain + names must be expressed in Punycode format. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + """ + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RetrieveRegisterParametersResponse(proto.Message): + r"""Response for the ``RetrieveRegisterParameters`` method. + + Attributes: + register_parameters (google.cloud.domains_v1.types.RegisterParameters): + Parameters to use when calling the ``RegisterDomain`` + method. + """ + + register_parameters: "RegisterParameters" = proto.Field( + proto.MESSAGE, + number=1, + message="RegisterParameters", + ) + + +class RegisterDomainRequest(proto.Message): + r"""Request for the ``RegisterDomain`` method. + + Attributes: + parent (str): + Required. The parent resource of the ``Registration``. Must + be in the format ``projects/*/locations/*``. + registration (google.cloud.domains_v1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + domain_notices (MutableSequence[google.cloud.domains_v1.types.DomainNotice]): + The list of domain notices that you acknowledge. Call + ``RetrieveRegisterParameters`` to see the notices that need + acknowledgement. + contact_notices (MutableSequence[google.cloud.domains_v1.types.ContactNotice]): + The list of contact notices that the caller acknowledges. + The notices needed here depend on the values specified in + ``registration.contact_settings``. + yearly_price (google.type.money_pb2.Money): + Required. Yearly price to register or renew + the domain. The value that should be put here + can be obtained from RetrieveRegisterParameters + or SearchDomains calls. + validate_only (bool): + When true, only validation is performed, without actually + registering the domain. Follows: + https://cloud.google.com/apis/design/design_patterns#request_validation + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + registration: "Registration" = proto.Field( + proto.MESSAGE, + number=2, + message="Registration", + ) + domain_notices: MutableSequence["DomainNotice"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="DomainNotice", + ) + contact_notices: MutableSequence["ContactNotice"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="ContactNotice", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=5, + message=money_pb2.Money, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class RetrieveTransferParametersRequest(proto.Message): + r"""Request for the ``RetrieveTransferParameters`` method. + + Attributes: + domain_name (str): + Required. The domain name. 
Unicode domain + names must be expressed in Punycode format. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + """ + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RetrieveTransferParametersResponse(proto.Message): + r"""Response for the ``RetrieveTransferParameters`` method. + + Attributes: + transfer_parameters (google.cloud.domains_v1.types.TransferParameters): + Parameters to use when calling the ``TransferDomain`` + method. + """ + + transfer_parameters: "TransferParameters" = proto.Field( + proto.MESSAGE, + number=1, + message="TransferParameters", + ) + + +class TransferDomainRequest(proto.Message): + r"""Request for the ``TransferDomain`` method. + + Attributes: + parent (str): + Required. The parent resource of the ``Registration``. Must + be in the format ``projects/*/locations/*``. + registration (google.cloud.domains_v1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + + You can leave ``registration.dns_settings`` unset to import + the domain's current DNS configuration from its current + registrar. Use this option only if you are sure that the + domain's current DNS service does not cease upon transfer, + as is often the case for DNS services provided for free by + the registrar. + contact_notices (MutableSequence[google.cloud.domains_v1.types.ContactNotice]): + The list of contact notices that you acknowledge. The + notices needed here depend on the values specified in + ``registration.contact_settings``. + yearly_price (google.type.money_pb2.Money): + Required. Acknowledgement of the price to transfer or renew + the domain for one year. Call ``RetrieveTransferParameters`` + to obtain the price, which you must acknowledge. + authorization_code (google.cloud.domains_v1.types.AuthorizationCode): + The domain's transfer authorization code. You + can obtain this from the domain's current + registrar. + validate_only (bool): + Validate the request without actually + transferring the domain. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + registration: "Registration" = proto.Field( + proto.MESSAGE, + number=2, + message="Registration", + ) + contact_notices: MutableSequence["ContactNotice"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="ContactNotice", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=4, + message=money_pb2.Money, + ) + authorization_code: "AuthorizationCode" = proto.Field( + proto.MESSAGE, + number=5, + message="AuthorizationCode", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListRegistrationsRequest(proto.Message): + r"""Request for the ``ListRegistrations`` method. + + Attributes: + parent (str): + Required. The project and location from which to list + ``Registration``\ s, specified in the format + ``projects/*/locations/*``. + page_size (int): + Maximum number of results to return. + page_token (str): + When set to the ``next_page_token`` from a prior response, + provides the next page of results. + filter (str): + Filter expression to restrict the ``Registration``\ s + returned. + + The expression must specify the field name, a comparison + operator, and the value that you want to use for filtering. + The value must be a string, a number, a boolean, or an enum + value. The comparison operator should be one of =, !=, >, <, + >=, <=, or : for prefix or wildcard matches. 
+ + For example, to filter to a specific domain name, use an + expression like ``domainName="example.com"``. You can also + check for the existence of a field; for example, to find + domains using custom DNS settings, use an expression like + ``dnsSettings.customDns:*``. + + You can also create compound filters by combining + expressions with the ``AND`` and ``OR`` operators. For + example, to find domains that are suspended or have specific + issues flagged, use an expression like + ``(state=SUSPENDED) OR (issue:*)``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListRegistrationsResponse(proto.Message): + r"""Response for the ``ListRegistrations`` method. + + Attributes: + registrations (MutableSequence[google.cloud.domains_v1.types.Registration]): + A list of ``Registration``\ s. + next_page_token (str): + When present, there are more results to retrieve. Set + ``page_token`` to this value on a subsequent call to get the + next page of results. + """ + + @property + def raw_page(self): + return self + + registrations: MutableSequence["Registration"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Registration", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetRegistrationRequest(proto.Message): + r"""Request for the ``GetRegistration`` method. + + Attributes: + name (str): + Required. The name of the ``Registration`` to get, in the + format ``projects/*/locations/*/registrations/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateRegistrationRequest(proto.Message): + r"""Request for the ``UpdateRegistration`` method. + + Attributes: + registration (google.cloud.domains_v1.types.Registration): + Fields of the ``Registration`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the labels + are being updated, the ``update_mask`` is ``"labels"``. + """ + + registration: "Registration" = proto.Field( + proto.MESSAGE, + number=1, + message="Registration", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ConfigureManagementSettingsRequest(proto.Message): + r"""Request for the ``ConfigureManagementSettings`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose management + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + management_settings (google.cloud.domains_v1.types.ManagementSettings): + Fields of the ``ManagementSettings`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the transfer + lock is being updated, the ``update_mask`` is + ``"transfer_lock_state"``. 
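+
+    For illustration only (names are hypothetical), building this request
+    through the public client to unlock the transfer lock might look
+    like::
+
+        from google.cloud import domains_v1
+        from google.protobuf import field_mask_pb2
+
+        client = domains_v1.DomainsClient()
+        operation = client.configure_management_settings(
+            registration="projects/my-project/locations/global/registrations/example.com",
+            management_settings=domains_v1.ManagementSettings(
+                transfer_lock_state=domains_v1.TransferLockState.UNLOCKED,
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["transfer_lock_state"]),
+        )
+        operation.result()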
+ """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + management_settings: "ManagementSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="ManagementSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class ConfigureDnsSettingsRequest(proto.Message): + r"""Request for the ``ConfigureDnsSettings`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose DNS + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + dns_settings (google.cloud.domains_v1.types.DnsSettings): + Fields of the ``DnsSettings`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the name + servers are being updated for an existing Custom DNS + configuration, the ``update_mask`` is + ``"custom_dns.name_servers"``. + + When changing the DNS provider from one type to another, + pass the new provider's field name as part of the field + mask. For example, when changing from a Google Domains DNS + configuration to a Custom DNS configuration, the + ``update_mask`` is ``"custom_dns"``. // + validate_only (bool): + Validate the request without actually + updating the DNS settings. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + dns_settings: "DnsSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="DnsSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ConfigureContactSettingsRequest(proto.Message): + r"""Request for the ``ConfigureContactSettings`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose contact + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + contact_settings (google.cloud.domains_v1.types.ContactSettings): + Fields of the ``ContactSettings`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the + registrant contact is being updated, the ``update_mask`` is + ``"registrant_contact"``. + contact_notices (MutableSequence[google.cloud.domains_v1.types.ContactNotice]): + The list of contact notices that the caller acknowledges. + The notices needed here depend on the values specified in + ``contact_settings``. + validate_only (bool): + Validate the request without actually + updating the contact settings. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + contact_settings: "ContactSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="ContactSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + contact_notices: MutableSequence["ContactNotice"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="ContactNotice", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ExportRegistrationRequest(proto.Message): + r"""Request for the ``ExportRegistration`` method. + + Attributes: + name (str): + Required. The name of the ``Registration`` to export, in the + format ``projects/*/locations/*/registrations/*``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteRegistrationRequest(proto.Message): + r"""Request for the ``DeleteRegistration`` method. + + Attributes: + name (str): + Required. The name of the ``Registration`` to delete, in the + format ``projects/*/locations/*/registrations/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RetrieveAuthorizationCodeRequest(proto.Message): + r"""Request for the ``RetrieveAuthorizationCode`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being retrieved, in the format + ``projects/*/locations/*/registrations/*``. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ResetAuthorizationCodeRequest(proto.Message): + r"""Request for the ``ResetAuthorizationCode`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being reset, in the format + ``projects/*/locations/*/registrations/*``. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RegisterParameters(proto.Message): + r"""Parameters required to register a new domain. + + Attributes: + domain_name (str): + The domain name. Unicode domain names are + expressed in Punycode format. + availability (google.cloud.domains_v1.types.RegisterParameters.Availability): + Indicates whether the domain is available for registration. + This value is accurate when obtained by calling + ``RetrieveRegisterParameters``, but is approximate when + obtained by calling ``SearchDomains``. + supported_privacy (MutableSequence[google.cloud.domains_v1.types.ContactPrivacy]): + Contact privacy options that the domain + supports. + domain_notices (MutableSequence[google.cloud.domains_v1.types.DomainNotice]): + Notices about special properties of the + domain. + yearly_price (google.type.money_pb2.Money): + Price to register or renew the domain for one + year. + """ + + class Availability(proto.Enum): + r"""Possible availability states of a domain name. + + Values: + AVAILABILITY_UNSPECIFIED (0): + The availability is unspecified. + AVAILABLE (1): + The domain is available for registration. + UNAVAILABLE (2): + The domain is not available for registration. + Generally this means it is already registered to + another party. + UNSUPPORTED (3): + The domain is not currently supported by + Cloud Domains, but may be available elsewhere. + UNKNOWN (4): + Cloud Domains is unable to determine domain + availability, generally due to system + maintenance at the domain name registry. + """ + AVAILABILITY_UNSPECIFIED = 0 + AVAILABLE = 1 + UNAVAILABLE = 2 + UNSUPPORTED = 3 + UNKNOWN = 4 + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + availability: Availability = proto.Field( + proto.ENUM, + number=2, + enum=Availability, + ) + supported_privacy: MutableSequence["ContactPrivacy"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="ContactPrivacy", + ) + domain_notices: MutableSequence["DomainNotice"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="DomainNotice", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=5, + message=money_pb2.Money, + ) + + +class TransferParameters(proto.Message): + r"""Parameters required to transfer a domain from another + registrar. + + Attributes: + domain_name (str): + The domain name. Unicode domain names are + expressed in Punycode format. 
+ current_registrar (str): + The registrar that currently manages the + domain. + name_servers (MutableSequence[str]): + The name servers that currently store the + configuration of the domain. + transfer_lock_state (google.cloud.domains_v1.types.TransferLockState): + Indicates whether the domain is protected by a transfer + lock. For a transfer to succeed, this must show + ``UNLOCKED``. To unlock a domain, go to its current + registrar. + supported_privacy (MutableSequence[google.cloud.domains_v1.types.ContactPrivacy]): + Contact privacy options that the domain + supports. + yearly_price (google.type.money_pb2.Money): + Price to transfer or renew the domain for one + year. + """ + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + current_registrar: str = proto.Field( + proto.STRING, + number=2, + ) + name_servers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + transfer_lock_state: "TransferLockState" = proto.Field( + proto.ENUM, + number=4, + enum="TransferLockState", + ) + supported_privacy: MutableSequence["ContactPrivacy"] = proto.RepeatedField( + proto.ENUM, + number=5, + enum="ContactPrivacy", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=6, + message=money_pb2.Money, + ) + + +class AuthorizationCode(proto.Message): + r"""Defines an authorization code. + + Attributes: + code (str): + The Authorization Code in ASCII. It can be + used to transfer the domain to or from another + registrar. + """ + + code: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. Output + only. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. + target (str): + Server-defined resource path for the target + of the operation. + verb (str): + Name of the verb executed by the operation. + status_detail (str): + Human-readable status of the operation, if + any. + api_version (str): + API version used to start the operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_detail: str = proto.Field( + proto.STRING, + number=5, + ) + api_version: str = proto.Field( + proto.STRING, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/__init__.py new file mode 100644 index 000000000000..fd4d43c7c8b3 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/__init__.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.domains_v1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.domains import DomainsAsyncClient, DomainsClient +from .types.domains import ( + AuthorizationCode, + ConfigureContactSettingsRequest, + ConfigureDnsSettingsRequest, + ConfigureManagementSettingsRequest, + ContactNotice, + ContactPrivacy, + ContactSettings, + DeleteRegistrationRequest, + DnsSettings, + DomainNotice, + ExportRegistrationRequest, + GetRegistrationRequest, + ListRegistrationsRequest, + ListRegistrationsResponse, + ManagementSettings, + OperationMetadata, + RegisterDomainRequest, + RegisterParameters, + Registration, + ResetAuthorizationCodeRequest, + RetrieveAuthorizationCodeRequest, + RetrieveRegisterParametersRequest, + RetrieveRegisterParametersResponse, + RetrieveTransferParametersRequest, + RetrieveTransferParametersResponse, + SearchDomainsRequest, + SearchDomainsResponse, + TransferDomainRequest, + TransferLockState, + TransferParameters, + UpdateRegistrationRequest, +) + +__all__ = ( + "DomainsAsyncClient", + "AuthorizationCode", + "ConfigureContactSettingsRequest", + "ConfigureDnsSettingsRequest", + "ConfigureManagementSettingsRequest", + "ContactNotice", + "ContactPrivacy", + "ContactSettings", + "DeleteRegistrationRequest", + "DnsSettings", + "DomainNotice", + "DomainsClient", + "ExportRegistrationRequest", + "GetRegistrationRequest", + "ListRegistrationsRequest", + "ListRegistrationsResponse", + "ManagementSettings", + "OperationMetadata", + "RegisterDomainRequest", + "RegisterParameters", + "Registration", + "ResetAuthorizationCodeRequest", + "RetrieveAuthorizationCodeRequest", + "RetrieveRegisterParametersRequest", + "RetrieveRegisterParametersResponse", + "RetrieveTransferParametersRequest", + "RetrieveTransferParametersResponse", + "SearchDomainsRequest", + "SearchDomainsResponse", + "TransferDomainRequest", + "TransferLockState", + "TransferParameters", + "UpdateRegistrationRequest", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_metadata.json b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_metadata.json new file mode 100644 index 000000000000..fa29a8dff5a9 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_metadata.json @@ -0,0 +1,253 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.domains_v1beta1", + "protoPackage": "google.cloud.domains.v1beta1", + "schema": "1.0", + "services": { + "Domains": { + "clients": { + "grpc": { + "libraryClient": "DomainsClient", + "rpcs": { + "ConfigureContactSettings": { + "methods": [ + "configure_contact_settings" + ] + }, + "ConfigureDnsSettings": { + "methods": [ + "configure_dns_settings" + ] + }, + "ConfigureManagementSettings": { + "methods": [ + "configure_management_settings" + ] + }, + "DeleteRegistration": { + "methods": [ + "delete_registration" + ] + }, + "ExportRegistration": { + "methods": [ + "export_registration" + ] + }, + "GetRegistration": { + "methods": [ + "get_registration" + ] + }, + "ListRegistrations": { + "methods": [ + "list_registrations" + ] + }, + "RegisterDomain": { + "methods": [ + "register_domain" + ] + }, + "ResetAuthorizationCode": { + "methods": [ + "reset_authorization_code" + ] + }, + "RetrieveAuthorizationCode": { + "methods": [ + 
"retrieve_authorization_code" + ] + }, + "RetrieveRegisterParameters": { + "methods": [ + "retrieve_register_parameters" + ] + }, + "RetrieveTransferParameters": { + "methods": [ + "retrieve_transfer_parameters" + ] + }, + "SearchDomains": { + "methods": [ + "search_domains" + ] + }, + "TransferDomain": { + "methods": [ + "transfer_domain" + ] + }, + "UpdateRegistration": { + "methods": [ + "update_registration" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DomainsAsyncClient", + "rpcs": { + "ConfigureContactSettings": { + "methods": [ + "configure_contact_settings" + ] + }, + "ConfigureDnsSettings": { + "methods": [ + "configure_dns_settings" + ] + }, + "ConfigureManagementSettings": { + "methods": [ + "configure_management_settings" + ] + }, + "DeleteRegistration": { + "methods": [ + "delete_registration" + ] + }, + "ExportRegistration": { + "methods": [ + "export_registration" + ] + }, + "GetRegistration": { + "methods": [ + "get_registration" + ] + }, + "ListRegistrations": { + "methods": [ + "list_registrations" + ] + }, + "RegisterDomain": { + "methods": [ + "register_domain" + ] + }, + "ResetAuthorizationCode": { + "methods": [ + "reset_authorization_code" + ] + }, + "RetrieveAuthorizationCode": { + "methods": [ + "retrieve_authorization_code" + ] + }, + "RetrieveRegisterParameters": { + "methods": [ + "retrieve_register_parameters" + ] + }, + "RetrieveTransferParameters": { + "methods": [ + "retrieve_transfer_parameters" + ] + }, + "SearchDomains": { + "methods": [ + "search_domains" + ] + }, + "TransferDomain": { + "methods": [ + "transfer_domain" + ] + }, + "UpdateRegistration": { + "methods": [ + "update_registration" + ] + } + } + }, + "rest": { + "libraryClient": "DomainsClient", + "rpcs": { + "ConfigureContactSettings": { + "methods": [ + "configure_contact_settings" + ] + }, + "ConfigureDnsSettings": { + "methods": [ + "configure_dns_settings" + ] + }, + "ConfigureManagementSettings": { + "methods": [ + "configure_management_settings" + ] + }, + "DeleteRegistration": { + "methods": [ + "delete_registration" + ] + }, + "ExportRegistration": { + "methods": [ + "export_registration" + ] + }, + "GetRegistration": { + "methods": [ + "get_registration" + ] + }, + "ListRegistrations": { + "methods": [ + "list_registrations" + ] + }, + "RegisterDomain": { + "methods": [ + "register_domain" + ] + }, + "ResetAuthorizationCode": { + "methods": [ + "reset_authorization_code" + ] + }, + "RetrieveAuthorizationCode": { + "methods": [ + "retrieve_authorization_code" + ] + }, + "RetrieveRegisterParameters": { + "methods": [ + "retrieve_register_parameters" + ] + }, + "RetrieveTransferParameters": { + "methods": [ + "retrieve_transfer_parameters" + ] + }, + "SearchDomains": { + "methods": [ + "search_domains" + ] + }, + "TransferDomain": { + "methods": [ + "transfer_domain" + ] + }, + "UpdateRegistration": { + "methods": [ + "update_registration" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py new file mode 100644 index 000000000000..69ff013987c9 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.5.1" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/py.typed b/packages/google-cloud-domains/google/cloud/domains_v1beta1/py.typed new file mode 100644 index 000000000000..b463d6ccaf9d --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-domains package uses inline types. diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/__init__.py new file mode 100644 index 000000000000..aa19fb0a5548 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DomainsAsyncClient +from .client import DomainsClient + +__all__ = ( + "DomainsClient", + "DomainsAsyncClient", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/async_client.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/async_client.py new file mode 100644 index 000000000000..3db694217d2b --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/async_client.py @@ -0,0 +1,2343 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.domains_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import money_pb2 # type: ignore + +from google.cloud.domains_v1beta1.services.domains import pagers +from google.cloud.domains_v1beta1.types import domains + +from .client import DomainsClient +from .transports.base import DEFAULT_CLIENT_INFO, DomainsTransport +from .transports.grpc_asyncio import DomainsGrpcAsyncIOTransport + + +class DomainsAsyncClient: + """The Cloud Domains API enables management and configuration of + domain names. + """ + + _client: DomainsClient + + DEFAULT_ENDPOINT = DomainsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DomainsClient.DEFAULT_MTLS_ENDPOINT + + registration_path = staticmethod(DomainsClient.registration_path) + parse_registration_path = staticmethod(DomainsClient.parse_registration_path) + common_billing_account_path = staticmethod( + DomainsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DomainsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DomainsClient.common_folder_path) + parse_common_folder_path = staticmethod(DomainsClient.parse_common_folder_path) + common_organization_path = staticmethod(DomainsClient.common_organization_path) + parse_common_organization_path = staticmethod( + DomainsClient.parse_common_organization_path + ) + common_project_path = staticmethod(DomainsClient.common_project_path) + parse_common_project_path = staticmethod(DomainsClient.parse_common_project_path) + common_location_path = staticmethod(DomainsClient.common_location_path) + parse_common_location_path = staticmethod(DomainsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DomainsAsyncClient: The constructed client. 
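+
+        A short usage sketch (``sa.json`` is a placeholder path to a
+        service account key file):
+
+        .. code-block:: python
+
+            import json
+
+            from google.cloud import domains_v1beta1
+
+            # Load the service account key info from disk.
+            with open("sa.json") as f:
+                info = json.load(f)
+
+            client = domains_v1beta1.DomainsAsyncClient.from_service_account_info(info)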
+        """
+        return DomainsClient.from_service_account_info.__func__(DomainsAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DomainsAsyncClient: The constructed client.
+        """
+        return DomainsClient.from_service_account_file.__func__(DomainsAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DomainsClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DomainsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DomainsTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(DomainsClient).get_transport_class, type(DomainsClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, DomainsTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the domains client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.DomainsTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DomainsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def search_domains( + self, + request: Optional[Union[domains.SearchDomainsRequest, dict]] = None, + *, + location: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.SearchDomainsResponse: + r"""Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_search_domains(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.SearchDomainsRequest( + query="query_value", + location="location_value", + ) + + # Make the request + response = await client.search_domains(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.SearchDomainsRequest, dict]]): + The request object. Request for the ``SearchDomains`` method. + location (:class:`str`): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Required. String used to search for + available domain names. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
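+
+        A request can also be built with flattened arguments instead of a
+        request object; a brief sketch (the project ID is a placeholder):
+
+        .. code-block:: python
+
+            response = await client.search_domains(
+                location="projects/my-project/locations/global",
+                query="example",
+            )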
+ + Returns: + google.cloud.domains_v1beta1.types.SearchDomainsResponse: + Response for the SearchDomains method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.SearchDomainsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_domains, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def retrieve_register_parameters( + self, + request: Optional[ + Union[domains.RetrieveRegisterParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveRegisterParametersResponse: + r"""Gets parameters needed to register a new domain name, including + price and up-to-date availability. Use the returned values to + call ``RegisterDomain``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_retrieve_register_parameters(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.RetrieveRegisterParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = await client.retrieve_register_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.RetrieveRegisterParametersRequest, dict]]): + The request object. Request for the ``RetrieveRegisterParameters`` method. + location (:class:`str`): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (:class:`str`): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.RetrieveRegisterParametersResponse: + Response for the RetrieveRegisterParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.RetrieveRegisterParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retrieve_register_parameters, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def register_domain( + self, + request: Optional[Union[domains.RegisterDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_register_domain(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + registration = domains_v1beta1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1beta1.RegisterDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.register_domain(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.RegisterDomainRequest, dict]]): + The request object. Request for the ``RegisterDomain`` method. + parent (:class:`str`): + Required. The parent resource of the ``Registration``. + Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (:class:`google.cloud.domains_v1beta1.types.Registration`): + Required. The complete ``Registration`` resource to be + created. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (:class:`google.type.money_pb2.Money`): + Required. Yearly price to register or + renew the domain. The value that should + be put here can be obtained from + RetrieveRegisterParameters or + SearchDomains calls. + + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. 
+ First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, registration, yearly_price]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.RegisterDomainRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.register_domain, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def retrieve_transfer_parameters( + self, + request: Optional[ + Union[domains.RetrieveTransferParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveTransferParametersResponse: + r"""Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_retrieve_transfer_parameters(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.RetrieveTransferParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = await client.retrieve_transfer_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.RetrieveTransferParametersRequest, dict]]): + The request object. Request for the ``RetrieveTransferParameters`` method. + location (:class:`str`): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (:class:`str`): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.RetrieveTransferParametersResponse: + Response for the RetrieveTransferParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.RetrieveTransferParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retrieve_transfer_parameters, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def transfer_domain( + self, + request: Optional[Union[domains.TransferDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + authorization_code: Optional[domains.AuthorizationCode] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_transfer_domain(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + registration = domains_v1beta1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1beta1.TransferDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.transfer_domain(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.TransferDomainRequest, dict]]): + The request object. Request for the ``TransferDomain`` method. + parent (:class:`str`): + Required. The parent resource of the ``Registration``. 
+ Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (:class:`google.cloud.domains_v1beta1.types.Registration`): + Required. The complete ``Registration`` resource to be + created. + + You can leave ``registration.dns_settings`` unset to + import the domain's current DNS configuration from its + current registrar. Use this option only if you are sure + that the domain's current DNS service does not cease + upon transfer, as is often the case for DNS services + provided for free by the registrar. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (:class:`google.type.money_pb2.Money`): + Required. Acknowledgement of the price to transfer or + renew the domain for one year. Call + ``RetrieveTransferParameters`` to obtain the price, + which you must acknowledge. + + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorization_code (:class:`google.cloud.domains_v1beta1.types.AuthorizationCode`): + The domain's transfer authorization + code. You can obtain this from the + domain's current registrar. + + This corresponds to the ``authorization_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, registration, yearly_price, authorization_code] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.TransferDomainRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + if authorization_code is not None: + request.authorization_code = authorization_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.transfer_domain, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_registrations( + self, + request: Optional[Union[domains.ListRegistrationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRegistrationsAsyncPager: + r"""Lists the ``Registration`` resources in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_list_registrations(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.ListRegistrationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_registrations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.ListRegistrationsRequest, dict]]): + The request object. Request for the ``ListRegistrations`` method. + parent (:class:`str`): + Required. The project and location from which to list + ``Registration``\ s, specified in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.services.domains.pagers.ListRegistrationsAsyncPager: + Response for the ListRegistrations method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ListRegistrationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_registrations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRegistrationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_registration( + self, + request: Optional[Union[domains.GetRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.Registration: + r"""Gets the details of a ``Registration`` resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_get_registration(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.GetRegistrationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.GetRegistrationRequest, dict]]): + The request object. Request for the ``GetRegistration`` method. + name (:class:`str`): + Required. The name of the ``Registration`` to get, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.Registration: + The Registration resource facilitates managing and configuring domain name + registrations. 
+ + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.GetRegistrationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_registration, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_registration( + self, + request: Optional[Union[domains.UpdateRegistrationRequest, dict]] = None, + *, + registration: Optional[domains.Registration] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_update_registration(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.UpdateRegistrationRequest( + ) + + # Make the request + operation = client.update_registration(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.UpdateRegistrationRequest, dict]]): + The request object. Request for the ``UpdateRegistration`` method. + registration (:class:`google.cloud.domains_v1beta1.types.Registration`): + Fields of the ``Registration`` to update. + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the labels are being updated, the ``update_mask`` is + ``"labels"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.UpdateRegistrationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
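+        # (For example, with hypothetical values, updating only labels would pass
+        # registration=domains.Registration(labels={"env": "prod"}) together with
+        # update_mask=field_mask_pb2.FieldMask(paths=["labels"]); the mask keeps
+        # every other stored field untouched.)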
+ if registration is not None: + request.registration = registration + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_registration, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration.name", request.registration.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def configure_management_settings( + self, + request: Optional[ + Union[domains.ConfigureManagementSettingsRequest, dict] + ] = None, + *, + registration: Optional[str] = None, + management_settings: Optional[domains.ManagementSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a ``Registration``'s management settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_configure_management_settings(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.ConfigureManagementSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_management_settings(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.ConfigureManagementSettingsRequest, dict]]): + The request object. Request for the ``ConfigureManagementSettings`` method. + registration (:class:`str`): + Required. The name of the ``Registration`` whose + management settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management_settings (:class:`google.cloud.domains_v1beta1.types.ManagementSettings`): + Fields of the ``ManagementSettings`` to update. + This corresponds to the ``management_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The field mask describing which fields to + update as a comma-separated list. 
For example, if only + the transfer lock is being updated, the ``update_mask`` + is ``"transfer_lock_state"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, management_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = domains.ConfigureManagementSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if management_settings is not None: + request.management_settings = management_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.configure_management_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
+        return response
+
+    async def configure_dns_settings(
+        self,
+        request: Optional[Union[domains.ConfigureDnsSettingsRequest, dict]] = None,
+        *,
+        registration: Optional[str] = None,
+        dns_settings: Optional[domains.DnsSettings] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Updates a ``Registration``'s DNS settings.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import domains_v1beta1
+
+            async def sample_configure_dns_settings():
+                # Create a client
+                client = domains_v1beta1.DomainsAsyncClient()
+
+                # Initialize request argument(s)
+                request = domains_v1beta1.ConfigureDnsSettingsRequest(
+                    registration="registration_value",
+                )
+
+                # Make the request
+                operation = client.configure_dns_settings(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.domains_v1beta1.types.ConfigureDnsSettingsRequest, dict]]):
+                The request object. Request for the ``ConfigureDnsSettings`` method.
+            registration (:class:`str`):
+                Required. The name of the ``Registration`` whose DNS
+                settings are being updated, in the format
+                ``projects/*/locations/*/registrations/*``.
+
+                This corresponds to the ``registration`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            dns_settings (:class:`google.cloud.domains_v1beta1.types.DnsSettings`):
+                Fields of the ``DnsSettings`` to update.
+                This corresponds to the ``dns_settings`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. The field mask describing which fields to
+                update as a comma-separated list. For example, if only
+                the name servers are being updated for an existing
+                Custom DNS configuration, the ``update_mask`` is
+                ``"custom_dns.name_servers"``.
+
+                When changing the DNS provider from one type to another,
+                pass the new provider's field name as part of the field
+                mask. For example, when changing from a Google Domains
+                DNS configuration to a Custom DNS configuration, the
+                ``update_mask`` is ``"custom_dns"``.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name
+                registrations.
+
+                   There are several ways to create a new Registration
+                   resource:
+
+                   To create a new Registration resource, find a
+                   suitable domain name by calling the SearchDomains
+                   method with a query to see available domain name
+                   options. After choosing a name, call
+                   RetrieveRegisterParameters to ensure availability and
+                   obtain information like pricing, which is needed to
+                   build a call to RegisterDomain.
+
+                   Another way to create a new Registration is to
+                   transfer an existing domain from another registrar.
+                   First, go to the current registrar to unlock the
+                   domain for transfer and retrieve the domain's
+                   transfer authorization code. Then call
+                   RetrieveTransferParameters to confirm that the domain
+                   is unlocked and to get values needed to build a call
+                   to TransferDomain.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([registration, dns_settings, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = domains.ConfigureDnsSettingsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if registration is not None:
+            request.registration = registration
+        if dns_settings is not None:
+            request.dns_settings = dns_settings
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.configure_dns_settings,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("registration", request.registration),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            domains.Registration,
+            metadata_type=domains.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def configure_contact_settings(
+        self,
+        request: Optional[Union[domains.ConfigureContactSettingsRequest, dict]] = None,
+        *,
+        registration: Optional[str] = None,
+        contact_settings: Optional[domains.ContactSettings] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Updates a ``Registration``'s contact settings. Some changes
+        require confirmation by the domain's registrant contact.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_configure_contact_settings(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.ConfigureContactSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_contact_settings(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.ConfigureContactSettingsRequest, dict]]): + The request object. Request for the ``ConfigureContactSettings`` method. + registration (:class:`str`): + Required. The name of the ``Registration`` whose contact + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contact_settings (:class:`google.cloud.domains_v1beta1.types.ContactSettings`): + Fields of the ``ContactSettings`` to update. + This corresponds to the ``contact_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the registrant contact is being updated, the + ``update_mask`` is ``"registrant_contact"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
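+        # (The `any([...])` check below treats the flattened trio of registration,
+        # contact_settings, and update_mask as a group: setting any one of them
+        # alongside `request` raises ValueError.)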
+        has_flattened_params = any([registration, contact_settings, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = domains.ConfigureContactSettingsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if registration is not None:
+            request.registration = registration
+        if contact_settings is not None:
+            request.contact_settings = contact_settings
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.configure_contact_settings,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("registration", request.registration),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            domains.Registration,
+            metadata_type=domains.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def export_registration(
+        self,
+        request: Optional[Union[domains.ExportRegistrationRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Exports a ``Registration`` resource, such that it is no longer
+        managed by Cloud Domains.
+
+        When an active domain is successfully exported, you can continue
+        to use the domain in `Google
+        Domains <https://domains.google/>`__ until it expires. The
+        calling user becomes the domain's sole owner in Google Domains,
+        and permissions for the domain are subsequently managed there.
+        The domain does not renew automatically unless the new owner
+        sets up billing in Google Domains.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import domains_v1beta1
+
+            async def sample_export_registration():
+                # Create a client
+                client = domains_v1beta1.DomainsAsyncClient()
+
+                # Initialize request argument(s)
+                request = domains_v1beta1.ExportRegistrationRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.export_registration(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.domains_v1beta1.types.ExportRegistrationRequest, dict]]):
+                The request object. Request for the ``ExportRegistration`` method.
+            name (:class:`str`):
+                Required. The name of the ``Registration`` to export, in
+                the format ``projects/*/locations/*/registrations/*``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name
+                registrations.
+
+                   There are several ways to create a new Registration
+                   resource:
+
+                   To create a new Registration resource, find a
+                   suitable domain name by calling the SearchDomains
+                   method with a query to see available domain name
+                   options. After choosing a name, call
+                   RetrieveRegisterParameters to ensure availability and
+                   obtain information like pricing, which is needed to
+                   build a call to RegisterDomain.
+
+                   Another way to create a new Registration is to
+                   transfer an existing domain from another registrar.
+                   First, go to the current registrar to unlock the
+                   domain for transfer and retrieve the domain's
+                   transfer authorization code. Then call
+                   RetrieveTransferParameters to confirm that the domain
+                   is unlocked and to get values needed to build a call
+                   to TransferDomain.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = domains.ExportRegistrationRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.export_registration,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            domains.Registration,
+            metadata_type=domains.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_registration(
+        self,
+        request: Optional[Union[domains.DeleteRegistrationRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Deletes a ``Registration`` resource.
+
+        This method works on any ``Registration`` resource using
+        `Subscription or Commitment
+        billing <https://cloud.google.com/domains/pricing#billing-models>`__, provided that the
+        resource was created at least 1 day in the past.
+
+        For ``Registration`` resources using `Monthly
+        billing <https://cloud.google.com/domains/pricing#billing-models>`__, this method works
+        if:
+
+        -  ``state`` is ``EXPORTED`` with ``expire_time`` in the past
+        -  ``state`` is ``REGISTRATION_FAILED``
+        -  ``state`` is ``TRANSFER_FAILED``
+
+        When an active registration is successfully deleted, you can
+        continue to use the domain in `Google
+        Domains <https://domains.google/>`__ until it expires. The
+        calling user becomes the domain's sole owner in Google Domains,
+        and permissions for the domain are subsequently managed there.
+        The domain does not renew automatically unless the new owner
+        sets up billing in Google Domains.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import domains_v1beta1
+
+            async def sample_delete_registration():
+                # Create a client
+                client = domains_v1beta1.DomainsAsyncClient()
+
+                # Initialize request argument(s)
+                request = domains_v1beta1.DeleteRegistrationRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.delete_registration(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.domains_v1beta1.types.DeleteRegistrationRequest, dict]]):
+                The request object. Request for the ``DeleteRegistration`` method.
+            name (:class:`str`):
+                Required. The name of the ``Registration`` to delete, in
+                the format ``projects/*/locations/*/registrations/*``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                empty messages in your APIs. A typical example is to
+                use it as the request or the response type of an API
+                method. For instance:
+
+                   service Foo {
+                      rpc Bar(google.protobuf.Empty) returns
+                      (google.protobuf.Empty);
+
+                   }
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = domains.DeleteRegistrationRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
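+        # (wrap_method layers the default retry/timeout policy and the client-info
+        # user-agent metadata onto the bare transport callable; the per-call
+        # `retry` and `timeout` arguments passed below still take precedence.)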
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_registration, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + async def retrieve_authorization_code( + self, + request: Optional[Union[domains.RetrieveAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_retrieve_authorization_code(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.RetrieveAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = await client.retrieve_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.RetrieveAuthorizationCodeRequest, dict]]): + The request object. Request for the ``RetrieveAuthorizationCode`` method. + registration (:class:`str`): + Required. The name of the ``Registration`` whose + authorization code is being retrieved, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = domains.RetrieveAuthorizationCodeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retrieve_authorization_code, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reset_authorization_code( + self, + request: Optional[Union[domains.ResetAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + async def sample_reset_authorization_code(): + # Create a client + client = domains_v1beta1.DomainsAsyncClient() + + # Initialize request argument(s) + request = domains_v1beta1.ResetAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = await client.reset_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.domains_v1beta1.types.ResetAuthorizationCodeRequest, dict]]): + The request object. Request for the ``ResetAuthorizationCode`` method. + registration (:class:`str`): + Required. The name of the ``Registration`` whose + authorization code is being reset, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = domains.ResetAuthorizationCodeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reset_authorization_code, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DomainsAsyncClient",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py new file mode 100644 index 000000000000..48a1a73b0874 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py @@ -0,0 +1,2595 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.domains_v1beta1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.type import money_pb2  # type: ignore
+
+from google.cloud.domains_v1beta1.services.domains import pagers
+from google.cloud.domains_v1beta1.types import domains
+
+from .transports.base import DEFAULT_CLIENT_INFO, DomainsTransport
+from .transports.grpc import DomainsGrpcTransport
+from .transports.grpc_asyncio import DomainsGrpcAsyncIOTransport
+from .transports.rest import DomainsRestTransport
+
+
+class DomainsClientMeta(type):
+    """Metaclass for the Domains client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DomainsTransport]]
+    _transport_registry["grpc"] = DomainsGrpcTransport
+    _transport_registry["grpc_asyncio"] = DomainsGrpcAsyncIOTransport
+    _transport_registry["rest"] = DomainsRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[DomainsTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DomainsClient(metaclass=DomainsClientMeta):
+    """The Cloud Domains API enables management and configuration of
+    domain names.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "domains.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DomainsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DomainsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DomainsTransport: + """Returns the transport used by the client instance. + + Returns: + DomainsTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def registration_path(
+        project: str,
+        location: str,
+        registration: str,
+    ) -> str:
+        """Returns a fully-qualified registration string."""
+        return "projects/{project}/locations/{location}/registrations/{registration}".format(
+            project=project,
+            location=location,
+            registration=registration,
+        )
+
+    @staticmethod
+    def parse_registration_path(path: str) -> Dict[str, str]:
+        """Parses a registration path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/registrations/(?P<registration>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, DomainsTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the domains client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, DomainsTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DomainsTransport): + # transport is a DomainsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def search_domains( + self, + request: Optional[Union[domains.SearchDomainsRequest, dict]] = None, + *, + location: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.SearchDomainsResponse: + r"""Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_search_domains(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.SearchDomainsRequest( + query="query_value", + location="location_value", + ) + + # Make the request + response = client.search_domains(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.SearchDomainsRequest, dict]): + The request object. Request for the ``SearchDomains`` method. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Required. String used to search for + available domain names. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.SearchDomainsResponse: + Response for the SearchDomains method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.SearchDomainsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.SearchDomainsRequest): + request = domains.SearchDomainsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_domains] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def retrieve_register_parameters( + self, + request: Optional[ + Union[domains.RetrieveRegisterParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveRegisterParametersResponse: + r"""Gets parameters needed to register a new domain name, including + price and up-to-date availability. 
Use the returned values to + call ``RegisterDomain``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_retrieve_register_parameters(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.RetrieveRegisterParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = client.retrieve_register_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.RetrieveRegisterParametersRequest, dict]): + The request object. Request for the ``RetrieveRegisterParameters`` method. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (str): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.RetrieveRegisterParametersResponse: + Response for the RetrieveRegisterParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RetrieveRegisterParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RetrieveRegisterParametersRequest): + request = domains.RetrieveRegisterParametersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_register_parameters + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
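+        # Callers typically feed values from this response (such as the
+        # returned yearly price) into a subsequent ``RegisterDomain`` call;
+        # see ``register_domain`` below for an end-to-end example.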
+ return response + + def register_domain( + self, + request: Optional[Union[domains.RegisterDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_register_domain(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + registration = domains_v1beta1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1beta1.RegisterDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.register_domain(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.RegisterDomainRequest, dict]): + The request object. Request for the ``RegisterDomain`` method. + parent (str): + Required. The parent resource of the ``Registration``. + Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (google.cloud.domains_v1beta1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (google.type.money_pb2.Money): + Required. Yearly price to register or + renew the domain. The value that should + be put here can be obtained from + RetrieveRegisterParameters or + SearchDomains calls. 
+ + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, registration, yearly_price]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RegisterDomainRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RegisterDomainRequest): + request = domains.RegisterDomainRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.register_domain] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
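+        # The returned future follows the google.api_core.operation.Operation
+        # interface: result() blocks until the Registration reaches a terminal
+        # state (raising on failure), and metadata reports progress.
+        # Illustrative usage:
+        #
+        #   operation = client.register_domain(request=request)
+        #   registration = operation.result(timeout=600)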
+ return response + + def retrieve_transfer_parameters( + self, + request: Optional[ + Union[domains.RetrieveTransferParametersRequest, dict] + ] = None, + *, + location: Optional[str] = None, + domain_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveTransferParametersResponse: + r"""Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_retrieve_transfer_parameters(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.RetrieveTransferParametersRequest( + domain_name="domain_name_value", + location="location_value", + ) + + # Make the request + response = client.retrieve_transfer_parameters(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.RetrieveTransferParametersRequest, dict]): + The request object. Request for the ``RetrieveTransferParameters`` method. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + domain_name (str): + Required. The domain name. Unicode + domain names must be expressed in + Punycode format. + + This corresponds to the ``domain_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.RetrieveTransferParametersResponse: + Response for the RetrieveTransferParameters method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location, domain_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RetrieveTransferParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RetrieveTransferParametersRequest): + request = domains.RetrieveTransferParametersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if location is not None: + request.location = location + if domain_name is not None: + request.domain_name = domain_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_transfer_parameters + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def transfer_domain( + self, + request: Optional[Union[domains.TransferDomainRequest, dict]] = None, + *, + parent: Optional[str] = None, + registration: Optional[domains.Registration] = None, + yearly_price: Optional[money_pb2.Money] = None, + authorization_code: Optional[domains.AuthorizationCode] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_transfer_domain(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + registration = domains_v1beta1.Registration() + registration.domain_name = "domain_name_value" + registration.contact_settings.privacy = "REDACTED_CONTACT_DATA" + registration.contact_settings.registrant_contact.email = "email_value" + registration.contact_settings.registrant_contact.phone_number = "phone_number_value" + registration.contact_settings.admin_contact.email = "email_value" + registration.contact_settings.admin_contact.phone_number = "phone_number_value" + registration.contact_settings.technical_contact.email = "email_value" + registration.contact_settings.technical_contact.phone_number = "phone_number_value" + + request = domains_v1beta1.TransferDomainRequest( + parent="parent_value", + registration=registration, + ) + + # Make the request + operation = client.transfer_domain(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.TransferDomainRequest, dict]): + The request object. Request for the ``TransferDomain`` method. + parent (str): + Required. The parent resource of the ``Registration``. + Must be in the format ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + registration (google.cloud.domains_v1beta1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + + You can leave ``registration.dns_settings`` unset to + import the domain's current DNS configuration from its + current registrar. Use this option only if you are sure + that the domain's current DNS service does not cease + upon transfer, as is often the case for DNS services + provided for free by the registrar. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + yearly_price (google.type.money_pb2.Money): + Required. Acknowledgement of the price to transfer or + renew the domain for one year. Call + ``RetrieveTransferParameters`` to obtain the price, + which you must acknowledge. + + This corresponds to the ``yearly_price`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorization_code (google.cloud.domains_v1beta1.types.AuthorizationCode): + The domain's transfer authorization + code. You can obtain this from the + domain's current registrar. + + This corresponds to the ``authorization_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. 
+ + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, registration, yearly_price, authorization_code] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.TransferDomainRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.TransferDomainRequest): + request = domains.TransferDomainRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if registration is not None: + request.registration = registration + if yearly_price is not None: + request.yearly_price = yearly_price + if authorization_code is not None: + request.authorization_code = authorization_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.transfer_domain] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_registrations( + self, + request: Optional[Union[domains.ListRegistrationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRegistrationsPager: + r"""Lists the ``Registration`` resources in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_list_registrations(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.ListRegistrationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_registrations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.ListRegistrationsRequest, dict]): + The request object. Request for the ``ListRegistrations`` method. + parent (str): + Required. The project and location from which to list + ``Registration``\ s, specified in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.services.domains.pagers.ListRegistrationsPager: + Response for the ListRegistrations method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ListRegistrationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ListRegistrationsRequest): + request = domains.ListRegistrationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_registrations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRegistrationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_registration( + self, + request: Optional[Union[domains.GetRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.Registration: + r"""Gets the details of a ``Registration`` resource. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_get_registration(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.GetRegistrationRequest( + name="name_value", + ) + + # Make the request + response = client.get_registration(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.GetRegistrationRequest, dict]): + The request object. Request for the ``GetRegistration`` method. + name (str): + Required. The name of the ``Registration`` to get, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.Registration: + The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.GetRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.GetRegistrationRequest): + request = domains.GetRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_registration] + + # Certain fields should be provided within the metadata header; + # add these here. 
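+        # (The routing header, e.g. ``x-goog-request-params: name=...``, lets
+        # the backend route the call to the correct regional resource.)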
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_registration( + self, + request: Optional[Union[domains.UpdateRegistrationRequest, dict]] = None, + *, + registration: Optional[domains.Registration] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_update_registration(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.UpdateRegistrationRequest( + ) + + # Make the request + operation = client.update_registration(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.UpdateRegistrationRequest, dict]): + The request object. Request for the ``UpdateRegistration`` method. + registration (google.cloud.domains_v1beta1.types.Registration): + Fields of the ``Registration`` to update. + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the labels are being updated, the ``update_mask`` is + ``"labels"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. 
After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.UpdateRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.UpdateRegistrationRequest): + request = domains.UpdateRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_registration] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration.name", request.registration.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def configure_management_settings( + self, + request: Optional[ + Union[domains.ConfigureManagementSettingsRequest, dict] + ] = None, + *, + registration: Optional[str] = None, + management_settings: Optional[domains.ManagementSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a ``Registration``'s management settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_configure_management_settings(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.ConfigureManagementSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_management_settings(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.ConfigureManagementSettingsRequest, dict]): + The request object. Request for the ``ConfigureManagementSettings`` method. + registration (str): + Required. The name of the ``Registration`` whose + management settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management_settings (google.cloud.domains_v1beta1.types.ManagementSettings): + Fields of the ``ManagementSettings`` to update. + This corresponds to the ``management_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to + update as a comma-separated list. For example, if only + the transfer lock is being updated, the ``update_mask`` + is ``"transfer_lock_state"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
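+        # (Mixing ``request`` with flattened keyword arguments would be
+        # ambiguous, so it is rejected below rather than silently merged.)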
+ has_flattened_params = any([registration, management_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ConfigureManagementSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ConfigureManagementSettingsRequest): + request = domains.ConfigureManagementSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if management_settings is not None: + request.management_settings = management_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.configure_management_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def configure_dns_settings( + self, + request: Optional[Union[domains.ConfigureDnsSettingsRequest, dict]] = None, + *, + registration: Optional[str] = None, + dns_settings: Optional[domains.DnsSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a ``Registration``'s DNS settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_configure_dns_settings(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.ConfigureDnsSettingsRequest( + registration="registration_value", + ) + + # Make the request + operation = client.configure_dns_settings(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.ConfigureDnsSettingsRequest, dict]): + The request object. Request for the ``ConfigureDnsSettings`` method. + registration (str): + Required. The name of the ``Registration`` whose DNS + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. 
+
+                This corresponds to the ``registration`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            dns_settings (google.cloud.domains_v1beta1.types.DnsSettings):
+                Fields of the ``DnsSettings`` to update.
+                This corresponds to the ``dns_settings`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. The field mask describing which fields to
+                update as a comma-separated list. For example, if only
+                the name servers are being updated for an existing
+                Custom DNS configuration, the ``update_mask`` is
+                ``"custom_dns.name_servers"``.
+
+                When changing the DNS provider from one type to another,
+                pass the new provider's field name as part of the field
+                mask. For example, when changing from a Google Domains
+                DNS configuration to a Custom DNS configuration, the
+                ``update_mask`` is ``"custom_dns"``.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name
+                registrations.
+
+                There are several ways to create a new Registration
+                resource:
+
+                To create a new Registration resource, find a
+                suitable domain name by calling the SearchDomains
+                method with a query to see available domain name
+                options. After choosing a name, call
+                RetrieveRegisterParameters to ensure availability and
+                obtain information like pricing, which is needed to
+                build a call to RegisterDomain.
+
+                Another way to create a new Registration is to
+                transfer an existing domain from another registrar.
+                First, go to the current registrar to unlock the
+                domain for transfer and retrieve the domain's
+                transfer authorization code. Then call
+                RetrieveTransferParameters to confirm that the domain
+                is unlocked and to get values needed to build a call
+                to TransferDomain.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([registration, dns_settings, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a domains.ConfigureDnsSettingsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, domains.ConfigureDnsSettingsRequest):
+            request = domains.ConfigureDnsSettingsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if registration is not None:
+            request.registration = registration
+        if dns_settings is not None:
+            request.dns_settings = dns_settings
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.configure_dns_settings]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("registration", request.registration),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            domains.Registration,
+            metadata_type=domains.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def configure_contact_settings(
+        self,
+        request: Optional[Union[domains.ConfigureContactSettingsRequest, dict]] = None,
+        *,
+        registration: Optional[str] = None,
+        contact_settings: Optional[domains.ContactSettings] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Updates a ``Registration``'s contact settings. Some changes
+        require confirmation by the domain's registrant contact.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import domains_v1beta1
+
+            def sample_configure_contact_settings():
+                # Create a client
+                client = domains_v1beta1.DomainsClient()
+
+                # Initialize request argument(s)
+                request = domains_v1beta1.ConfigureContactSettingsRequest(
+                    registration="registration_value",
+                )
+
+                # Make the request
+                operation = client.configure_contact_settings(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.domains_v1beta1.types.ConfigureContactSettingsRequest, dict]):
+                The request object. Request for the ``ConfigureContactSettings`` method.
+            registration (str):
+                Required. The name of the ``Registration`` whose contact
+                settings are being updated, in the format
+                ``projects/*/locations/*/registrations/*``.
+
+                This corresponds to the ``registration`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            contact_settings (google.cloud.domains_v1beta1.types.ContactSettings):
+                Fields of the ``ContactSettings`` to update.
+                This corresponds to the ``contact_settings`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. The field mask describing which fields to
+                update as a comma-separated list. For example, if only
+                the registrant contact is being updated, the
+                ``update_mask`` is ``"registrant_contact"``.
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration, contact_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ConfigureContactSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ConfigureContactSettingsRequest): + request = domains.ConfigureContactSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + if contact_settings is not None: + request.contact_settings = contact_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.configure_contact_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
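+        # Note: changes that need confirmation by the registrant contact can
+        # leave the returned operation pending until that confirmation occurs,
+        # so result() may block longer here than for other update methods.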
+ return response + + def export_registration( + self, + request: Optional[Union[domains.ExportRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports a ``Registration`` resource, such that it is no longer + managed by Cloud Domains. + + When an active domain is successfully exported, you can continue + to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_export_registration(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.ExportRegistrationRequest( + name="name_value", + ) + + # Make the request + operation = client.export_registration(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.ExportRegistrationRequest, dict]): + The request object. Request for the ``ExportRegistration`` method. + name (str): + Required. The name of the ``Registration`` to export, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.domains_v1beta1.types.Registration` The Registration resource facilitates managing and configuring domain name + registrations. + + There are several ways to create a new Registration + resource: + + To create a new Registration resource, find a + suitable domain name by calling the SearchDomains + method with a query to see available domain name + options. After choosing a name, call + RetrieveRegisterParameters to ensure availability and + obtain information like pricing, which is needed to + build a call to RegisterDomain. + + Another way to create a new Registration is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the + domain for transfer and retrieve the domain's + transfer authorization code. Then call + RetrieveTransferParameters to confirm that the domain + is unlocked and to get values needed to build a call + to TransferDomain. + + """ + # Create or coerce a protobuf request object. 
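The coercion step that follows accepts a typed request, a plain dict, or flattened keyword fields, but never a request object combined with flattened fields. A short sketch of that contract, with a hypothetical resource name:

.. code-block:: python

    from google.cloud import domains_v1beta1

    client = domains_v1beta1.DomainsClient()
    name = "projects/my-project/locations/global/registrations/example-domain.com"

    op1 = client.export_registration(name=name)               # flattened field
    op2 = client.export_registration(request={"name": name})  # dict, coerced

    try:
        client.export_registration(request={"name": name}, name=name)
    except ValueError:
        pass  # mixing both styles is rejected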
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ExportRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ExportRegistrationRequest): + request = domains.ExportRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_registration] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + domains.Registration, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_registration( + self, + request: Optional[Union[domains.DeleteRegistrationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a ``Registration`` resource. + + This method works on any ``Registration`` resource using + `Subscription or Commitment + billing `__, provided that the + resource was created at least 1 day in the past. + + For ``Registration`` resources using `Monthly + billing `__, this method works + if: + + - ``state`` is ``EXPORTED`` with ``expire_time`` in the past + - ``state`` is ``REGISTRATION_FAILED`` + - ``state`` is ``TRANSFER_FAILED`` + + When an active registration is successfully deleted, you can + continue to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_delete_registration(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.DeleteRegistrationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_registration(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.DeleteRegistrationRequest, dict]): + The request object. Request for the ``DeleteRegistration`` method. + name (str): + Required. The name of the ``Registration`` to delete, in + the format ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.DeleteRegistrationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.DeleteRegistrationRequest): + request = domains.DeleteRegistrationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_registration] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=domains.OperationMetadata, + ) + + # Done; return the response. 
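Because the operation's result type is ``google.protobuf.Empty``, waiting on a delete yields no payload. A brief sketch, again with a hypothetical resource name:

.. code-block:: python

    from google.cloud import domains_v1beta1

    client = domains_v1beta1.DomainsClient()
    operation = client.delete_registration(
        name="projects/my-project/locations/global/registrations/example-domain.com"
    )
    operation.result()  # returns an Empty message; raises on failure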
+ return response + + def retrieve_authorization_code( + self, + request: Optional[Union[domains.RetrieveAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_retrieve_authorization_code(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.RetrieveAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = client.retrieve_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.RetrieveAuthorizationCodeRequest, dict]): + The request object. Request for the ``RetrieveAuthorizationCode`` method. + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being retrieved, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.RetrieveAuthorizationCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.RetrieveAuthorizationCodeRequest): + request = domains.RetrieveAuthorizationCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_authorization_code + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
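The routing header assembled below becomes a single ``x-goog-request-params`` metadata entry whose value is the URL-encoded field. A sketch of what ``to_grpc_metadata`` produces, to the best of my reading of google-api-core:

.. code-block:: python

    from google.api_core import gapic_v1

    key, value = gapic_v1.routing_header.to_grpc_metadata(
        (("registration", "projects/my-project/locations/global/registrations/x.com"),)
    )
    print(key)    # x-goog-request-params
    print(value)  # registration=projects%2Fmy-project%2Flocations%2F... (URL-encoded)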
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reset_authorization_code( + self, + request: Optional[Union[domains.ResetAuthorizationCodeRequest, dict]] = None, + *, + registration: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import domains_v1beta1 + + def sample_reset_authorization_code(): + # Create a client + client = domains_v1beta1.DomainsClient() + + # Initialize request argument(s) + request = domains_v1beta1.ResetAuthorizationCodeRequest( + registration="registration_value", + ) + + # Make the request + response = client.reset_authorization_code(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.domains_v1beta1.types.ResetAuthorizationCodeRequest, dict]): + The request object. Request for the ``ResetAuthorizationCode`` method. + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being reset, in the format + ``projects/*/locations/*/registrations/*``. + + This corresponds to the ``registration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.domains_v1beta1.types.AuthorizationCode: + Defines an authorization code. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([registration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a domains.ResetAuthorizationCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, domains.ResetAuthorizationCodeRequest): + request = domains.ResetAuthorizationCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if registration is not None: + request.registration = registration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
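The wrapped method honors per-call ``retry`` and ``timeout`` overrides in place of the transport defaults. A minimal sketch, with illustrative retry parameters:

.. code-block:: python

    from google.api_core import retry as retries
    from google.cloud import domains_v1beta1

    client = domains_v1beta1.DomainsClient()
    code = client.reset_authorization_code(
        registration="projects/my-project/locations/global/registrations/x.com",
        retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0, deadline=60.0),
        timeout=30.0,
    )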
+ rpc = self._transport._wrapped_methods[self._transport.reset_authorization_code] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("registration", request.registration),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DomainsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DomainsClient",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/pagers.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/pagers.py new file mode 100644 index 000000000000..6a6a335ce1dd --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.domains_v1beta1.types import domains + + +class ListRegistrationsPager: + """A pager for iterating through ``list_registrations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.domains_v1beta1.types.ListRegistrationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``registrations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRegistrations`` requests and continue to iterate + through the ``registrations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.domains_v1beta1.types.ListRegistrationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., domains.ListRegistrationsResponse], + request: domains.ListRegistrationsRequest, + response: domains.ListRegistrationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.domains_v1beta1.types.ListRegistrationsRequest): + The initial request object. + response (google.cloud.domains_v1beta1.types.ListRegistrationsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = domains.ListRegistrationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[domains.ListRegistrationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[domains.Registration]: + for page in self.pages: + yield from page.registrations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRegistrationsAsyncPager: + """A pager for iterating through ``list_registrations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.domains_v1beta1.types.ListRegistrationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``registrations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRegistrations`` requests and continue to iterate + through the ``registrations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.domains_v1beta1.types.ListRegistrationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[domains.ListRegistrationsResponse]], + request: domains.ListRegistrationsRequest, + response: domains.ListRegistrationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.domains_v1beta1.types.ListRegistrationsRequest): + The initial request object. + response (google.cloud.domains_v1beta1.types.ListRegistrationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = domains.ListRegistrationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[domains.ListRegistrationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[domains.Registration]: + async def async_generator(): + async for page in self.pages: + for response in page.registrations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/__init__.py new file mode 100644 index 000000000000..8351ddd28377 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DomainsTransport +from .grpc import DomainsGrpcTransport +from .grpc_asyncio import DomainsGrpcAsyncIOTransport +from .rest import DomainsRestInterceptor, DomainsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DomainsTransport]] +_transport_registry["grpc"] = DomainsGrpcTransport +_transport_registry["grpc_asyncio"] = DomainsGrpcAsyncIOTransport +_transport_registry["rest"] = DomainsRestTransport + +__all__ = ( + "DomainsTransport", + "DomainsGrpcTransport", + "DomainsGrpcAsyncIOTransport", + "DomainsRestTransport", + "DomainsRestInterceptor", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/base.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/base.py new file mode 100644 index 000000000000..5f97d303a17a --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/base.py @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.domains_v1beta1 import gapic_version as package_version +from google.cloud.domains_v1beta1.types import domains + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DomainsTransport(abc.ABC): + """Abstract transport class for Domains.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "domains.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
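In practice the resolution order in this constructor is: explicit ``credentials``, then ``credentials_file``, then Application Default Credentials, with passing both explicit options being an error. A sketch of the two common calling styles, using a hypothetical key path:

.. code-block:: python

    from google.oauth2 import service_account
    from google.cloud import domains_v1beta1

    # Explicit service-account credentials (path is illustrative).
    creds = service_account.Credentials.from_service_account_file("/path/to/key.json")
    client = domains_v1beta1.DomainsClient(credentials=creds)

    # With no credentials arguments, google.auth.default() resolves
    # Application Default Credentials from the environment.
    client = domains_v1beta1.DomainsClient()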
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.search_domains: gapic_v1.method.wrap_method( + self.search_domains, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_register_parameters: gapic_v1.method.wrap_method( + self.retrieve_register_parameters, + default_timeout=None, + client_info=client_info, + ), + self.register_domain: gapic_v1.method.wrap_method( + self.register_domain, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_transfer_parameters: gapic_v1.method.wrap_method( + self.retrieve_transfer_parameters, + default_timeout=None, + client_info=client_info, + ), + self.transfer_domain: gapic_v1.method.wrap_method( + self.transfer_domain, + default_timeout=None, + client_info=client_info, + ), + self.list_registrations: gapic_v1.method.wrap_method( + self.list_registrations, + default_timeout=None, + client_info=client_info, + ), + self.get_registration: gapic_v1.method.wrap_method( + self.get_registration, + default_timeout=None, + client_info=client_info, + ), + self.update_registration: gapic_v1.method.wrap_method( + self.update_registration, + default_timeout=None, + client_info=client_info, + ), + self.configure_management_settings: gapic_v1.method.wrap_method( + self.configure_management_settings, + default_timeout=None, + client_info=client_info, + ), + self.configure_dns_settings: gapic_v1.method.wrap_method( + self.configure_dns_settings, + default_timeout=None, + client_info=client_info, + ), + self.configure_contact_settings: gapic_v1.method.wrap_method( + self.configure_contact_settings, + default_timeout=None, + client_info=client_info, + ), + self.export_registration: gapic_v1.method.wrap_method( + self.export_registration, + default_timeout=None, + client_info=client_info, + ), + self.delete_registration: gapic_v1.method.wrap_method( + self.delete_registration, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_authorization_code: gapic_v1.method.wrap_method( + self.retrieve_authorization_code, + default_timeout=None, + client_info=client_info, + ), + self.reset_authorization_code: gapic_v1.method.wrap_method( + self.reset_authorization_code, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def search_domains( + self, + ) -> Callable[ + [domains.SearchDomainsRequest], + Union[domains.SearchDomainsResponse, Awaitable[domains.SearchDomainsResponse]], + ]: + raise NotImplementedError() + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + Union[ + domains.RetrieveRegisterParametersResponse, + Awaitable[domains.RetrieveRegisterParametersResponse], + ], + ]: + raise NotImplementedError() + + @property + def register_domain( + self, + ) -> Callable[ + [domains.RegisterDomainRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + Union[ + domains.RetrieveTransferParametersResponse, + Awaitable[domains.RetrieveTransferParametersResponse], + ], + ]: + raise NotImplementedError() + + @property + def transfer_domain( + self, + ) -> Callable[ + [domains.TransferDomainRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], + Union[ + domains.ListRegistrationsResponse, + Awaitable[domains.ListRegistrationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_registration( + self, + ) -> Callable[ + [domains.GetRegistrationRequest], + Union[domains.Registration, Awaitable[domains.Registration]], + ]: + raise NotImplementedError() + + @property + def update_registration( + self, + ) -> Callable[ + [domains.UpdateRegistrationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def configure_management_settings( + self, + ) -> Callable[ + [domains.ConfigureManagementSettingsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def configure_dns_settings( + self, + ) -> Callable[ + [domains.ConfigureDnsSettingsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def configure_contact_settings( + self, + ) -> Callable[ + [domains.ConfigureContactSettingsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_registration( + self, + ) -> Callable[ + [domains.ExportRegistrationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_registration( + self, + ) -> Callable[ + [domains.DeleteRegistrationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], + Union[domains.AuthorizationCode, Awaitable[domains.AuthorizationCode]], + ]: + raise NotImplementedError() + + @property + def reset_authorization_code( + self, + ) -> Callable[ + [domains.ResetAuthorizationCodeRequest], + Union[domains.AuthorizationCode, Awaitable[domains.AuthorizationCode]], + ]: + raise NotImplementedError() + + 
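Each abstract RPC property above is wrapped by ``_prep_wrapped_messages`` via ``gapic_v1.method.wrap_method``, which layers default retry/timeout behavior over the raw callable. A toy illustration of that wrapper, assuming the google-api-core semantics:

.. code-block:: python

    from google.api_core import gapic_v1

    def echo(request, timeout=None, metadata=()):
        # Stand-in for a transport-level RPC callable (hypothetical).
        return request

    wrapped = gapic_v1.method.wrap_method(echo, default_timeout=30.0)
    print(wrapped("hello"))  # the call inherits default_timeout=30.0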
@property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = ("DomainsTransport",)
diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/grpc.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/grpc.py
new file mode 100644
index 000000000000..2bdbdb4f28b3
--- /dev/null
+++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/grpc.py
@@ -0,0 +1,754 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.domains_v1beta1.types import domains
+
+from .base import DEFAULT_CLIENT_INFO, DomainsTransport
+
+
+class DomainsGrpcTransport(DomainsTransport):
+    """gRPC backend transport for Domains.
+
+    The Cloud Domains API enables management and configuration of
+    domain names.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "domains.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes.
This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "domains.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
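A channel can also be built up front and handed to the transport, in which case credentials arguments are ignored. A sketch of that wiring:

.. code-block:: python

    from google.cloud import domains_v1beta1
    from google.cloud.domains_v1beta1.services.domains.transports import (
        DomainsGrpcTransport,
    )

    # Build the channel explicitly (e.g. to tune options), then hand it
    # to the transport and the client.
    channel = DomainsGrpcTransport.create_channel("domains.googleapis.com")
    transport = DomainsGrpcTransport(channel=channel)
    client = domains_v1beta1.DomainsClient(transport=transport)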
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def search_domains( + self, + ) -> Callable[[domains.SearchDomainsRequest], domains.SearchDomainsResponse]: + r"""Return a callable for the search domains method over gRPC. + + Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. + + Returns: + Callable[[~.SearchDomainsRequest], + ~.SearchDomainsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_domains" not in self._stubs: + self._stubs["search_domains"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/SearchDomains", + request_serializer=domains.SearchDomainsRequest.serialize, + response_deserializer=domains.SearchDomainsResponse.deserialize, + ) + return self._stubs["search_domains"] + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + domains.RetrieveRegisterParametersResponse, + ]: + r"""Return a callable for the retrieve register parameters method over gRPC. + + Gets parameters needed to register a new domain name, including + price and up-to-date availability. Use the returned values to + call ``RegisterDomain``. + + Returns: + Callable[[~.RetrieveRegisterParametersRequest], + ~.RetrieveRegisterParametersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_register_parameters" not in self._stubs: + self._stubs["retrieve_register_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RetrieveRegisterParameters", + request_serializer=domains.RetrieveRegisterParametersRequest.serialize, + response_deserializer=domains.RetrieveRegisterParametersResponse.deserialize, + ) + return self._stubs["retrieve_register_parameters"] + + @property + def register_domain( + self, + ) -> Callable[[domains.RegisterDomainRequest], operations_pb2.Operation]: + r"""Return a callable for the register domain method over gRPC. + + Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + Returns: + Callable[[~.RegisterDomainRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "register_domain" not in self._stubs: + self._stubs["register_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RegisterDomain", + request_serializer=domains.RegisterDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_domain"] + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + domains.RetrieveTransferParametersResponse, + ]: + r"""Return a callable for the retrieve transfer parameters method over gRPC. + + Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + Returns: + Callable[[~.RetrieveTransferParametersRequest], + ~.RetrieveTransferParametersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_transfer_parameters" not in self._stubs: + self._stubs["retrieve_transfer_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RetrieveTransferParameters", + request_serializer=domains.RetrieveTransferParametersRequest.serialize, + response_deserializer=domains.RetrieveTransferParametersResponse.deserialize, + ) + return self._stubs["retrieve_transfer_parameters"] + + @property + def transfer_domain( + self, + ) -> Callable[[domains.TransferDomainRequest], operations_pb2.Operation]: + r"""Return a callable for the transfer domain method over gRPC. + + Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + Returns: + Callable[[~.TransferDomainRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
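The memoization pattern used by these properties means each gRPC stub is constructed once per transport and reused on later accesses. A quick check of that behavior:

.. code-block:: python

    from google.cloud.domains_v1beta1.services.domains.transports import (
        DomainsGrpcTransport,
    )

    transport = DomainsGrpcTransport(
        channel=DomainsGrpcTransport.create_channel("domains.googleapis.com")
    )
    # The property memoizes the stub in self._stubs, so the gRPC callable
    # is built once and the same object is returned thereafter.
    assert transport.transfer_domain is transport.transfer_domain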
+ if "transfer_domain" not in self._stubs: + self._stubs["transfer_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/TransferDomain", + request_serializer=domains.TransferDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["transfer_domain"] + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], domains.ListRegistrationsResponse + ]: + r"""Return a callable for the list registrations method over gRPC. + + Lists the ``Registration`` resources in a project. + + Returns: + Callable[[~.ListRegistrationsRequest], + ~.ListRegistrationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_registrations" not in self._stubs: + self._stubs["list_registrations"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ListRegistrations", + request_serializer=domains.ListRegistrationsRequest.serialize, + response_deserializer=domains.ListRegistrationsResponse.deserialize, + ) + return self._stubs["list_registrations"] + + @property + def get_registration( + self, + ) -> Callable[[domains.GetRegistrationRequest], domains.Registration]: + r"""Return a callable for the get registration method over gRPC. + + Gets the details of a ``Registration`` resource. + + Returns: + Callable[[~.GetRegistrationRequest], + ~.Registration]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_registration" not in self._stubs: + self._stubs["get_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/GetRegistration", + request_serializer=domains.GetRegistrationRequest.serialize, + response_deserializer=domains.Registration.deserialize, + ) + return self._stubs["get_registration"] + + @property + def update_registration( + self, + ) -> Callable[[domains.UpdateRegistrationRequest], operations_pb2.Operation]: + r"""Return a callable for the update registration method over gRPC. + + Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + Returns: + Callable[[~.UpdateRegistrationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "update_registration" not in self._stubs:
+            self._stubs["update_registration"] = self.grpc_channel.unary_unary(
+                "/google.cloud.domains.v1beta1.Domains/UpdateRegistration",
+                request_serializer=domains.UpdateRegistrationRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["update_registration"]
+
+    @property
+    def configure_management_settings(
+        self,
+    ) -> Callable[
+        [domains.ConfigureManagementSettingsRequest], operations_pb2.Operation
+    ]:
+        r"""Return a callable for the configure management settings method over gRPC.
+
+        Updates a ``Registration``'s management settings.
+
+        Returns:
+            Callable[[~.ConfigureManagementSettingsRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "configure_management_settings" not in self._stubs:
+            self._stubs[
+                "configure_management_settings"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.domains.v1beta1.Domains/ConfigureManagementSettings",
+                request_serializer=domains.ConfigureManagementSettingsRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["configure_management_settings"]
+
+    @property
+    def configure_dns_settings(
+        self,
+    ) -> Callable[[domains.ConfigureDnsSettingsRequest], operations_pb2.Operation]:
+        r"""Return a callable for the configure dns settings method over gRPC.
+
+        Updates a ``Registration``'s DNS settings.
+
+        Returns:
+            Callable[[~.ConfigureDnsSettingsRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "configure_dns_settings" not in self._stubs:
+            self._stubs["configure_dns_settings"] = self.grpc_channel.unary_unary(
+                "/google.cloud.domains.v1beta1.Domains/ConfigureDnsSettings",
+                request_serializer=domains.ConfigureDnsSettingsRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["configure_dns_settings"]
+
+    @property
+    def configure_contact_settings(
+        self,
+    ) -> Callable[[domains.ConfigureContactSettingsRequest], operations_pb2.Operation]:
+        r"""Return a callable for the configure contact settings method over gRPC.
+
+        Updates a ``Registration``'s contact settings. Some changes
+        require confirmation by the domain's registrant contact.
+
+        Returns:
+            Callable[[~.ConfigureContactSettingsRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "configure_contact_settings" not in self._stubs: + self._stubs["configure_contact_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ConfigureContactSettings", + request_serializer=domains.ConfigureContactSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_contact_settings"] + + @property + def export_registration( + self, + ) -> Callable[[domains.ExportRegistrationRequest], operations_pb2.Operation]: + r"""Return a callable for the export registration method over gRPC. + + Exports a ``Registration`` resource, such that it is no longer + managed by Cloud Domains. + + When an active domain is successfully exported, you can continue + to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.ExportRegistrationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_registration" not in self._stubs: + self._stubs["export_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ExportRegistration", + request_serializer=domains.ExportRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_registration"] + + @property + def delete_registration( + self, + ) -> Callable[[domains.DeleteRegistrationRequest], operations_pb2.Operation]: + r"""Return a callable for the delete registration method over gRPC. + + Deletes a ``Registration`` resource. + + This method works on any ``Registration`` resource using + `Subscription or Commitment + billing `__, provided that the + resource was created at least 1 day in the past. + + For ``Registration`` resources using `Monthly + billing `__, this method works + if: + + - ``state`` is ``EXPORTED`` with ``expire_time`` in the past + - ``state`` is ``REGISTRATION_FAILED`` + - ``state`` is ``TRANSFER_FAILED`` + + When an active registration is successfully deleted, you can + continue to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.DeleteRegistrationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_registration" not in self._stubs: + self._stubs["delete_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/DeleteRegistration", + request_serializer=domains.DeleteRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_registration"] + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], domains.AuthorizationCode + ]: + r"""Return a callable for the retrieve authorization code method over gRPC. + + Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.RetrieveAuthorizationCodeRequest], + ~.AuthorizationCode]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_authorization_code" not in self._stubs: + self._stubs["retrieve_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RetrieveAuthorizationCode", + request_serializer=domains.RetrieveAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["retrieve_authorization_code"] + + @property + def reset_authorization_code( + self, + ) -> Callable[[domains.ResetAuthorizationCodeRequest], domains.AuthorizationCode]: + r"""Return a callable for the reset authorization code method over gRPC. + + Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.ResetAuthorizationCodeRequest], + ~.AuthorizationCode]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_authorization_code" not in self._stubs: + self._stubs["reset_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ResetAuthorizationCode", + request_serializer=domains.ResetAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["reset_authorization_code"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DomainsGrpcTransport",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/grpc_asyncio.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/grpc_asyncio.py new file mode 100644 index 000000000000..1237375b289c --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/grpc_asyncio.py @@ -0,0 +1,770 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.domains_v1beta1.types import domains + +from .base import DEFAULT_CLIENT_INFO, DomainsTransport +from .grpc import DomainsGrpcTransport + + +class DomainsGrpcAsyncIOTransport(DomainsTransport): + """gRPC AsyncIO backend transport for Domains. + + The Cloud Domains API enables management and configuration of + domain names. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "domains.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
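+
+        Example (illustrative sketch; ``my-project`` is a placeholder)::
+
+            channel = DomainsGrpcAsyncIOTransport.create_channel(
+                "domains.googleapis.com",
+                quota_project_id="my-project",
+            )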
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "domains.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def search_domains( + self, + ) -> Callable[ + [domains.SearchDomainsRequest], Awaitable[domains.SearchDomainsResponse] + ]: + r"""Return a callable for the search domains method over gRPC. + + Searches for available domain names similar to the provided + query. + + Availability results from this method are approximate; call + ``RetrieveRegisterParameters`` on a domain before registering to + confirm availability. 
+ + Returns: + Callable[[~.SearchDomainsRequest], + Awaitable[~.SearchDomainsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_domains" not in self._stubs: + self._stubs["search_domains"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/SearchDomains", + request_serializer=domains.SearchDomainsRequest.serialize, + response_deserializer=domains.SearchDomainsResponse.deserialize, + ) + return self._stubs["search_domains"] + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + Awaitable[domains.RetrieveRegisterParametersResponse], + ]: + r"""Return a callable for the retrieve register parameters method over gRPC. + + Gets parameters needed to register a new domain name, including + price and up-to-date availability. Use the returned values to + call ``RegisterDomain``. + + Returns: + Callable[[~.RetrieveRegisterParametersRequest], + Awaitable[~.RetrieveRegisterParametersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_register_parameters" not in self._stubs: + self._stubs["retrieve_register_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RetrieveRegisterParameters", + request_serializer=domains.RetrieveRegisterParametersRequest.serialize, + response_deserializer=domains.RetrieveRegisterParametersResponse.deserialize, + ) + return self._stubs["retrieve_register_parameters"] + + @property + def register_domain( + self, + ) -> Callable[[domains.RegisterDomainRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the register domain method over gRPC. + + Registers a new domain name and creates a corresponding + ``Registration`` resource. + + Call ``RetrieveRegisterParameters`` first to check availability + of the domain name and determine parameters like price that are + needed to build a call to this method. + + A successful call creates a ``Registration`` resource in state + ``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within + 1-2 minutes, indicating that the domain was successfully + registered. If the resource ends up in state + ``REGISTRATION_FAILED``, it indicates that the domain was not + registered successfully, and you can safely delete the resource + and retry registration. + + Returns: + Callable[[~.RegisterDomainRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
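+        # Illustrative sketch of the flow described above (editorial, not
+        # generated code; placeholder values):
+        #
+        #   params = await client.retrieve_register_parameters(
+        #       location="projects/my-project/locations/global",
+        #       domain_name="example.com",
+        #   )
+        #   operation = await client.register_domain(
+        #       parent="projects/my-project/locations/global",
+        #       registration=domains_v1beta1.Registration(
+        #           domain_name="example.com",
+        #           contact_settings=contact_settings,  # assembled beforehand
+        #       ),
+        #       yearly_price=params.register_parameters.yearly_price,
+        #   )
+        #   registration = await operation.result()  # ACTIVE within ~1-2 minutes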
+ if "register_domain" not in self._stubs: + self._stubs["register_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RegisterDomain", + request_serializer=domains.RegisterDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_domain"] + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + Awaitable[domains.RetrieveTransferParametersResponse], + ]: + r"""Return a callable for the retrieve transfer parameters method over gRPC. + + Gets parameters needed to transfer a domain name from another + registrar to Cloud Domains. For domains managed by Google + Domains, transferring to Cloud Domains is not supported. + + Use the returned values to call ``TransferDomain``. + + Returns: + Callable[[~.RetrieveTransferParametersRequest], + Awaitable[~.RetrieveTransferParametersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_transfer_parameters" not in self._stubs: + self._stubs["retrieve_transfer_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RetrieveTransferParameters", + request_serializer=domains.RetrieveTransferParametersRequest.serialize, + response_deserializer=domains.RetrieveTransferParametersResponse.deserialize, + ) + return self._stubs["retrieve_transfer_parameters"] + + @property + def transfer_domain( + self, + ) -> Callable[[domains.TransferDomainRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the transfer domain method over gRPC. + + Transfers a domain name from another registrar to Cloud Domains. + For domains managed by Google Domains, transferring to Cloud + Domains is not supported. + + Before calling this method, go to the domain's current registrar + to unlock the domain for transfer and retrieve the domain's + transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to this + method. + + A successful call creates a ``Registration`` resource in state + ``TRANSFER_PENDING``. It can take several days to complete the + transfer process. The registrant can often speed up this process + by approving the transfer through the current registrar, either + by clicking a link in an email from the registrar or by visiting + the registrar's website. + + A few minutes after transfer approval, the resource transitions + to state ``ACTIVE``, indicating that the transfer was + successful. If the transfer is rejected or the request expires + without being approved, the resource can end up in state + ``TRANSFER_FAILED``. If transfer fails, you can safely delete + the resource and retry the transfer. + + Returns: + Callable[[~.TransferDomainRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "transfer_domain" not in self._stubs: + self._stubs["transfer_domain"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/TransferDomain", + request_serializer=domains.TransferDomainRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["transfer_domain"] + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], Awaitable[domains.ListRegistrationsResponse] + ]: + r"""Return a callable for the list registrations method over gRPC. + + Lists the ``Registration`` resources in a project. + + Returns: + Callable[[~.ListRegistrationsRequest], + Awaitable[~.ListRegistrationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_registrations" not in self._stubs: + self._stubs["list_registrations"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ListRegistrations", + request_serializer=domains.ListRegistrationsRequest.serialize, + response_deserializer=domains.ListRegistrationsResponse.deserialize, + ) + return self._stubs["list_registrations"] + + @property + def get_registration( + self, + ) -> Callable[[domains.GetRegistrationRequest], Awaitable[domains.Registration]]: + r"""Return a callable for the get registration method over gRPC. + + Gets the details of a ``Registration`` resource. + + Returns: + Callable[[~.GetRegistrationRequest], + Awaitable[~.Registration]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_registration" not in self._stubs: + self._stubs["get_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/GetRegistration", + request_serializer=domains.GetRegistrationRequest.serialize, + response_deserializer=domains.Registration.deserialize, + ) + return self._stubs["get_registration"] + + @property + def update_registration( + self, + ) -> Callable[ + [domains.UpdateRegistrationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update registration method over gRPC. + + Updates select fields of a ``Registration`` resource, notably + ``labels``. To update other fields, use the appropriate custom + update method: + + - To update management settings, see + ``ConfigureManagementSettings`` + - To update DNS configuration, see ``ConfigureDnsSettings`` + - To update contact information, see + ``ConfigureContactSettings`` + + Returns: + Callable[[~.UpdateRegistrationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_registration" not in self._stubs: + self._stubs["update_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/UpdateRegistration", + request_serializer=domains.UpdateRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_registration"] + + @property + def configure_management_settings( + self, + ) -> Callable[ + [domains.ConfigureManagementSettingsRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the configure management settings method over gRPC. + + Updates a ``Registration``'s management settings. + + Returns: + Callable[[~.ConfigureManagementSettingsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "configure_management_settings" not in self._stubs: + self._stubs[ + "configure_management_settings" + ] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ConfigureManagementSettings", + request_serializer=domains.ConfigureManagementSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_management_settings"] + + @property + def configure_dns_settings( + self, + ) -> Callable[ + [domains.ConfigureDnsSettingsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the configure dns settings method over gRPC. + + Updates a ``Registration``'s DNS settings. + + Returns: + Callable[[~.ConfigureDnsSettingsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "configure_dns_settings" not in self._stubs: + self._stubs["configure_dns_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ConfigureDnsSettings", + request_serializer=domains.ConfigureDnsSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_dns_settings"] + + @property + def configure_contact_settings( + self, + ) -> Callable[ + [domains.ConfigureContactSettingsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the configure contact settings method over gRPC. + + Updates a ``Registration``'s contact settings. Some changes + require confirmation by the domain's registrant contact . + + Returns: + Callable[[~.ConfigureContactSettingsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "configure_contact_settings" not in self._stubs: + self._stubs["configure_contact_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ConfigureContactSettings", + request_serializer=domains.ConfigureContactSettingsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["configure_contact_settings"] + + @property + def export_registration( + self, + ) -> Callable[ + [domains.ExportRegistrationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the export registration method over gRPC. + + Exports a ``Registration`` resource, such that it is no longer + managed by Cloud Domains. + + When an active domain is successfully exported, you can continue + to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.ExportRegistrationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_registration" not in self._stubs: + self._stubs["export_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ExportRegistration", + request_serializer=domains.ExportRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_registration"] + + @property + def delete_registration( + self, + ) -> Callable[ + [domains.DeleteRegistrationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete registration method over gRPC. + + Deletes a ``Registration`` resource. + + This method works on any ``Registration`` resource using + `Subscription or Commitment + billing `__, provided that the + resource was created at least 1 day in the past. + + For ``Registration`` resources using `Monthly + billing `__, this method works + if: + + - ``state`` is ``EXPORTED`` with ``expire_time`` in the past + - ``state`` is ``REGISTRATION_FAILED`` + - ``state`` is ``TRANSFER_FAILED`` + + When an active registration is successfully deleted, you can + continue to use the domain in `Google + Domains `__ until it expires. The + calling user becomes the domain's sole owner in Google Domains, + and permissions for the domain are subsequently managed there. + The domain does not renew automatically unless the new owner + sets up billing in Google Domains. + + Returns: + Callable[[~.DeleteRegistrationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_registration" not in self._stubs: + self._stubs["delete_registration"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/DeleteRegistration", + request_serializer=domains.DeleteRegistrationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_registration"] + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], Awaitable[domains.AuthorizationCode] + ]: + r"""Return a callable for the retrieve authorization code method over gRPC. + + Gets the authorization code of the ``Registration`` for the + purpose of transferring the domain to another registrar. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.RetrieveAuthorizationCodeRequest], + Awaitable[~.AuthorizationCode]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_authorization_code" not in self._stubs: + self._stubs["retrieve_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/RetrieveAuthorizationCode", + request_serializer=domains.RetrieveAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["retrieve_authorization_code"] + + @property + def reset_authorization_code( + self, + ) -> Callable[ + [domains.ResetAuthorizationCodeRequest], Awaitable[domains.AuthorizationCode] + ]: + r"""Return a callable for the reset authorization code method over gRPC. + + Resets the authorization code of the ``Registration`` to a new + random string. + + You can call this method only after 60 days have elapsed since + the initial domain registration. + + Returns: + Callable[[~.ResetAuthorizationCodeRequest], + Awaitable[~.AuthorizationCode]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_authorization_code" not in self._stubs: + self._stubs["reset_authorization_code"] = self.grpc_channel.unary_unary( + "/google.cloud.domains.v1beta1.Domains/ResetAuthorizationCode", + request_serializer=domains.ResetAuthorizationCodeRequest.serialize, + response_deserializer=domains.AuthorizationCode.deserialize, + ) + return self._stubs["reset_authorization_code"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("DomainsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/rest.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/rest.py new file mode 100644 index 000000000000..87530e6cb32d --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/rest.py @@ -0,0 +1,2251 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.domains_v1beta1.types import domains + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DomainsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DomainsRestInterceptor: + """Interceptor for Domains. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DomainsRestTransport. + + .. 
code-block:: python + class MyCustomDomainsInterceptor(DomainsRestInterceptor): + def pre_configure_contact_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_configure_contact_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_configure_dns_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_configure_dns_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_configure_management_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_configure_management_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_registration(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_registration(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_export_registration(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_registration(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_registration(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_registration(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_registrations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_registrations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_register_domain(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_register_domain(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reset_authorization_code(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reset_authorization_code(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retrieve_authorization_code(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retrieve_authorization_code(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retrieve_register_parameters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retrieve_register_parameters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retrieve_transfer_parameters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retrieve_transfer_parameters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_search_domains(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_domains(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_transfer_domain(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_transfer_domain(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_update_registration(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_registration(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DomainsRestTransport(interceptor=MyCustomDomainsInterceptor()) + client = DomainsClient(transport=transport) + + + """ + + def pre_configure_contact_settings( + self, + request: domains.ConfigureContactSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ConfigureContactSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for configure_contact_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_configure_contact_settings( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for configure_contact_settings + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_configure_dns_settings( + self, + request: domains.ConfigureDnsSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ConfigureDnsSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for configure_dns_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_configure_dns_settings( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for configure_dns_settings + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_configure_management_settings( + self, + request: domains.ConfigureManagementSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ConfigureManagementSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for configure_management_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_configure_management_settings( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for configure_management_settings + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_delete_registration( + self, + request: domains.DeleteRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.DeleteRegistrationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_delete_registration( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_registration + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. 
+ """ + return response + + def pre_export_registration( + self, + request: domains.ExportRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ExportRegistrationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_export_registration( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_registration + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_get_registration( + self, + request: domains.GetRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.GetRegistrationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_get_registration( + self, response: domains.Registration + ) -> domains.Registration: + """Post-rpc interceptor for get_registration + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_list_registrations( + self, + request: domains.ListRegistrationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ListRegistrationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_registrations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_list_registrations( + self, response: domains.ListRegistrationsResponse + ) -> domains.ListRegistrationsResponse: + """Post-rpc interceptor for list_registrations + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_register_domain( + self, + request: domains.RegisterDomainRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RegisterDomainRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for register_domain + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_register_domain( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for register_domain + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_reset_authorization_code( + self, + request: domains.ResetAuthorizationCodeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.ResetAuthorizationCodeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reset_authorization_code + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. 
+ """ + return request, metadata + + def post_reset_authorization_code( + self, response: domains.AuthorizationCode + ) -> domains.AuthorizationCode: + """Post-rpc interceptor for reset_authorization_code + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_retrieve_authorization_code( + self, + request: domains.RetrieveAuthorizationCodeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RetrieveAuthorizationCodeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retrieve_authorization_code + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_retrieve_authorization_code( + self, response: domains.AuthorizationCode + ) -> domains.AuthorizationCode: + """Post-rpc interceptor for retrieve_authorization_code + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_retrieve_register_parameters( + self, + request: domains.RetrieveRegisterParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RetrieveRegisterParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retrieve_register_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_retrieve_register_parameters( + self, response: domains.RetrieveRegisterParametersResponse + ) -> domains.RetrieveRegisterParametersResponse: + """Post-rpc interceptor for retrieve_register_parameters + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_retrieve_transfer_parameters( + self, + request: domains.RetrieveTransferParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.RetrieveTransferParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retrieve_transfer_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_retrieve_transfer_parameters( + self, response: domains.RetrieveTransferParametersResponse + ) -> domains.RetrieveTransferParametersResponse: + """Post-rpc interceptor for retrieve_transfer_parameters + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_search_domains( + self, request: domains.SearchDomainsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[domains.SearchDomainsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_domains + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_search_domains( + self, response: domains.SearchDomainsResponse + ) -> domains.SearchDomainsResponse: + """Post-rpc interceptor for search_domains + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. 
+ """ + return response + + def pre_transfer_domain( + self, + request: domains.TransferDomainRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.TransferDomainRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for transfer_domain + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_transfer_domain( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for transfer_domain + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + def pre_update_registration( + self, + request: domains.UpdateRegistrationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[domains.UpdateRegistrationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_registration + + Override in a subclass to manipulate the request or metadata + before they are sent to the Domains server. + """ + return request, metadata + + def post_update_registration( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_registration + + Override in a subclass to manipulate the response + after it is returned by the Domains server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DomainsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DomainsRestInterceptor + + +class DomainsRestTransport(DomainsTransport): + """REST backend transport for Domains. + + The Cloud Domains API enables management and configuration of + domain names. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "domains.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DomainsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DomainsRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ConfigureContactSettings(DomainsRestStub): + def __hash__(self): + return hash("ConfigureContactSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ConfigureContactSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the configure contact + settings method over HTTP. + + Args: + request (~.domains.ConfigureContactSettingsRequest): + The request object. Request for the ``ConfigureContactSettings`` method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{registration=projects/*/locations/*/registrations/*}:configureContactSettings", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_configure_contact_settings( + request, metadata + ) + pb_request = domains.ConfigureContactSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_configure_contact_settings(resp) + return resp + + class _ConfigureDnsSettings(DomainsRestStub): + def __hash__(self): + return hash("ConfigureDnsSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ConfigureDnsSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the configure dns settings method over HTTP. + + Args: + request (~.domains.ConfigureDnsSettingsRequest): + The request object. Request for the ``ConfigureDnsSettings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
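Editorial note: every stub below follows the same pipeline: run the pre-interceptor, transcode the protobuf against the http_options, JSON-encode body and query params, send through the authorized session, then run the post-interceptor. A standalone sketch of the transcode step, using plain keyword arguments in place of the protobuf request (google-api-core's transcode accepts either form; values are illustrative):

    from google.api_core import path_template

    http_options = [
        {
            "method": "post",
            "uri": "/v1beta1/{registration=projects/*/locations/*/registrations/*}:configureContactSettings",
            "body": "*",
        },
    ]

    # Splits the request into method, URI, body, and query_params parts.
    transcoded = path_template.transcode(
        http_options,
        registration="projects/p/locations/global/registrations/example.com",
    )
    print(transcoded["method"])  # post
    print(transcoded["uri"])
    # /v1beta1/projects/p/locations/global/registrations/example.com:configureContactSettings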
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{registration=projects/*/locations/*/registrations/*}:configureDnsSettings", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_configure_dns_settings( + request, metadata + ) + pb_request = domains.ConfigureDnsSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_configure_dns_settings(resp) + return resp + + class _ConfigureManagementSettings(DomainsRestStub): + def __hash__(self): + return hash("ConfigureManagementSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ConfigureManagementSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the configure management + settings method over HTTP. + + Args: + request (~.domains.ConfigureManagementSettingsRequest): + The request object. Request for the ``ConfigureManagementSettings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{registration=projects/*/locations/*/registrations/*}:configureManagementSettings", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_configure_management_settings( + request, metadata + ) + pb_request = domains.ConfigureManagementSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_configure_management_settings(resp) + return resp + + class _DeleteRegistration(DomainsRestStub): + def __hash__(self): + return hash("DeleteRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.DeleteRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete registration method over HTTP. + + Args: + request (~.domains.DeleteRegistrationRequest): + The request object. Request for the ``DeleteRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta1/{name=projects/*/locations/*/registrations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_registration( + request, metadata + ) + pb_request = domains.DeleteRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_registration(resp) + return resp + + class _ExportRegistration(DomainsRestStub): + def __hash__(self): + return hash("ExportRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ExportRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export registration method over HTTP. + + Args: + request (~.domains.ExportRegistrationRequest): + The request object. Request for the ``ExportRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
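Editorial note: in normal use these stubs are not called directly; the public client resolves them through the properties defined at the bottom of this transport. For example, deleting a registration over REST and blocking on the long-running result (resource name is a placeholder):

    from google.cloud import domains_v1beta1

    client = domains_v1beta1.DomainsClient(transport="rest")
    operation = client.delete_registration(
        name="projects/my-project/locations/global/registrations/example.com"
    )
    operation.result()  # raises on failure, returns when the LRO completes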
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{name=projects/*/locations/*/registrations/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_registration( + request, metadata + ) + pb_request = domains.ExportRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_registration(resp) + return resp + + class _GetRegistration(DomainsRestStub): + def __hash__(self): + return hash("GetRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.GetRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.Registration: + r"""Call the get registration method over HTTP. + + Args: + request (~.domains.GetRegistrationRequest): + The request object. Request for the ``GetRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.Registration: + The ``Registration`` resource facilitates managing and + configuring domain name registrations. + + There are several ways to create a new ``Registration`` + resource: + + To create a new ``Registration`` resource, find a + suitable domain name by calling the ``SearchDomains`` + method with a query to see available domain name + options. After choosing a name, call + ``RetrieveRegisterParameters`` to ensure availability + and obtain information like pricing, which is needed to + build a call to ``RegisterDomain``. + + Another way to create a new ``Registration`` is to + transfer an existing domain from another registrar. + First, go to the current registrar to unlock the domain + for transfer and retrieve the domain's transfer + authorization code. 
Then call + ``RetrieveTransferParameters`` to confirm that the + domain is unlocked and to get values needed to build a + call to ``TransferDomain``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/registrations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_registration( + request, metadata + ) + pb_request = domains.GetRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.Registration() + pb_resp = domains.Registration.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_registration(resp) + return resp + + class _ListRegistrations(DomainsRestStub): + def __hash__(self): + return hash("ListRegistrations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ListRegistrationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.ListRegistrationsResponse: + r"""Call the list registrations method over HTTP. + + Args: + request (~.domains.ListRegistrationsRequest): + The request object. Request for the ``ListRegistrations`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.ListRegistrationsResponse: + Response for the ``ListRegistrations`` method. 
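Editorial note: ListRegistrations is the paginated method in this service; the client wraps this response in a pager that re-issues the call with each next page token. Typical usage, with a placeholder project:

    from google.cloud import domains_v1beta1

    client = domains_v1beta1.DomainsClient(transport="rest")
    for registration in client.list_registrations(
        parent="projects/my-project/locations/global"
    ):
        print(registration.domain_name, registration.state)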
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{parent=projects/*/locations/*}/registrations", + }, + ] + request, metadata = self._interceptor.pre_list_registrations( + request, metadata + ) + pb_request = domains.ListRegistrationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.ListRegistrationsResponse() + pb_resp = domains.ListRegistrationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_registrations(resp) + return resp + + class _RegisterDomain(DomainsRestStub): + def __hash__(self): + return hash("RegisterDomain") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RegisterDomainRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the register domain method over HTTP. + + Args: + request (~.domains.RegisterDomainRequest): + The request object. Request for the ``RegisterDomain`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{parent=projects/*/locations/*}/registrations:register", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_register_domain(request, metadata) + pb_request = domains.RegisterDomainRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_register_domain(resp) + return resp + + class _ResetAuthorizationCode(DomainsRestStub): + def __hash__(self): + return hash("ResetAuthorizationCode") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.ResetAuthorizationCodeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Call the reset authorization code method over HTTP. + + Args: + request (~.domains.ResetAuthorizationCodeRequest): + The request object. Request for the ``ResetAuthorizationCode`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.AuthorizationCode: + Defines an authorization code. 
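Editorial note: retrieve and reset are the two sides of authorization-code handling; retrieve returns the current transfer code, while reset invalidates it and mints a new one. A sketch with a placeholder resource name:

    from google.cloud import domains_v1beta1

    client = domains_v1beta1.DomainsClient(transport="rest")
    registration = "projects/my-project/locations/global/registrations/example.com"

    code = client.retrieve_authorization_code(registration=registration)
    print(code.code)  # give this to the gaining registrar

    # If the code has been exposed, invalidate it:
    new_code = client.reset_authorization_code(registration=registration)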
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{registration=projects/*/locations/*/registrations/*}:resetAuthorizationCode", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_reset_authorization_code( + request, metadata + ) + pb_request = domains.ResetAuthorizationCodeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.AuthorizationCode() + pb_resp = domains.AuthorizationCode.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reset_authorization_code(resp) + return resp + + class _RetrieveAuthorizationCode(DomainsRestStub): + def __hash__(self): + return hash("RetrieveAuthorizationCode") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RetrieveAuthorizationCodeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.AuthorizationCode: + r"""Call the retrieve authorization + code method over HTTP. + + Args: + request (~.domains.RetrieveAuthorizationCodeRequest): + The request object. Request for the ``RetrieveAuthorizationCode`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.AuthorizationCode: + Defines an authorization code. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{registration=projects/*/locations/*/registrations/*}:retrieveAuthorizationCode", + }, + ] + request, metadata = self._interceptor.pre_retrieve_authorization_code( + request, metadata + ) + pb_request = domains.RetrieveAuthorizationCodeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.AuthorizationCode() + pb_resp = domains.AuthorizationCode.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_authorization_code(resp) + return resp + + class _RetrieveRegisterParameters(DomainsRestStub): + def __hash__(self): + return hash("RetrieveRegisterParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "domainName": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RetrieveRegisterParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveRegisterParametersResponse: + r"""Call the retrieve register + parameters method over HTTP. + + Args: + request (~.domains.RetrieveRegisterParametersRequest): + The request object. Request for the ``RetrieveRegisterParameters`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.RetrieveRegisterParametersResponse: + Response for the ``RetrieveRegisterParameters`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{location=projects/*/locations/*}/registrations:retrieveRegisterParameters", + }, + ] + request, metadata = self._interceptor.pre_retrieve_register_parameters( + request, metadata + ) + pb_request = domains.RetrieveRegisterParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.RetrieveRegisterParametersResponse() + pb_resp = domains.RetrieveRegisterParametersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_register_parameters(resp) + return resp + + class _RetrieveTransferParameters(DomainsRestStub): + def __hash__(self): + return hash("RetrieveTransferParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "domainName": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.RetrieveTransferParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.RetrieveTransferParametersResponse: + r"""Call the retrieve transfer + parameters method over HTTP. + + Args: + request (~.domains.RetrieveTransferParametersRequest): + The request object. Request for the ``RetrieveTransferParameters`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.RetrieveTransferParametersResponse: + Response for the ``RetrieveTransferParameters`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{location=projects/*/locations/*}/registrations:retrieveTransferParameters", + }, + ] + request, metadata = self._interceptor.pre_retrieve_transfer_parameters( + request, metadata + ) + pb_request = domains.RetrieveTransferParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.RetrieveTransferParametersResponse() + pb_resp = domains.RetrieveTransferParametersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retrieve_transfer_parameters(resp) + return resp + + class _SearchDomains(DomainsRestStub): + def __hash__(self): + return hash("SearchDomains") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "query": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.SearchDomainsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> domains.SearchDomainsResponse: + r"""Call the search domains method over HTTP. + + Args: + request (~.domains.SearchDomainsRequest): + The request object. Request for the ``SearchDomains`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.domains.SearchDomainsResponse: + Response for the ``SearchDomains`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{location=projects/*/locations/*}/registrations:searchDomains", + }, + ] + request, metadata = self._interceptor.pre_search_domains(request, metadata) + pb_request = domains.SearchDomainsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = domains.SearchDomainsResponse() + pb_resp = domains.SearchDomainsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_domains(resp) + return resp + + class _TransferDomain(DomainsRestStub): + def __hash__(self): + return hash("TransferDomain") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.TransferDomainRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the transfer domain method over HTTP. + + Args: + request (~.domains.TransferDomainRequest): + The request object. Request for the ``TransferDomain`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{parent=projects/*/locations/*}/registrations:transfer", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_transfer_domain(request, metadata) + pb_request = domains.TransferDomainRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_transfer_domain(resp) + return resp + + class _UpdateRegistration(DomainsRestStub): + def __hash__(self): + return hash("UpdateRegistration") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: domains.UpdateRegistrationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update registration method over HTTP. + + Args: + request (~.domains.UpdateRegistrationRequest): + The request object. Request for the ``UpdateRegistration`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
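Editorial note: UpdateRegistration only covers fields such as labels (management, DNS, and contact settings each have their own Configure* method), and updateMask is a required query parameter, which is why it appears in the defaults above. A sketch with placeholder values:

    from google.cloud import domains_v1beta1
    from google.cloud.domains_v1beta1.types import domains
    from google.protobuf import field_mask_pb2

    client = domains_v1beta1.DomainsClient(transport="rest")
    operation = client.update_registration(
        registration=domains.Registration(
            name="projects/my-project/locations/global/registrations/example.com",
            labels={"env": "prod"},
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    )
    operation.result()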
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta1/{registration.name=projects/*/locations/*/registrations/*}", + "body": "registration", + }, + ] + request, metadata = self._interceptor.pre_update_registration( + request, metadata + ) + pb_request = domains.UpdateRegistrationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_registration(resp) + return resp + + @property + def configure_contact_settings( + self, + ) -> Callable[[domains.ConfigureContactSettingsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ConfigureContactSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def configure_dns_settings( + self, + ) -> Callable[[domains.ConfigureDnsSettingsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ConfigureDnsSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def configure_management_settings( + self, + ) -> Callable[ + [domains.ConfigureManagementSettingsRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ConfigureManagementSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_registration( + self, + ) -> Callable[[domains.DeleteRegistrationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_registration( + self, + ) -> Callable[[domains.ExportRegistrationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExportRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_registration( + self, + ) -> Callable[[domains.GetRegistrationRequest], domains.Registration]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_registrations( + self, + ) -> Callable[ + [domains.ListRegistrationsRequest], domains.ListRegistrationsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRegistrations(self._session, self._host, self._interceptor) # type: ignore + + @property + def register_domain( + self, + ) -> Callable[[domains.RegisterDomainRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RegisterDomain(self._session, self._host, self._interceptor) # type: ignore + + @property + def reset_authorization_code( + self, + ) -> Callable[[domains.ResetAuthorizationCodeRequest], domains.AuthorizationCode]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResetAuthorizationCode(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_authorization_code( + self, + ) -> Callable[ + [domains.RetrieveAuthorizationCodeRequest], domains.AuthorizationCode + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveAuthorizationCode(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_register_parameters( + self, + ) -> Callable[ + [domains.RetrieveRegisterParametersRequest], + domains.RetrieveRegisterParametersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveRegisterParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def retrieve_transfer_parameters( + self, + ) -> Callable[ + [domains.RetrieveTransferParametersRequest], + domains.RetrieveTransferParametersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetrieveTransferParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_domains( + self, + ) -> Callable[[domains.SearchDomainsRequest], domains.SearchDomainsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchDomains(self._session, self._host, self._interceptor) # type: ignore + + @property + def transfer_domain( + self, + ) -> Callable[[domains.TransferDomainRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TransferDomain(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_registration( + self, + ) -> Callable[[domains.UpdateRegistrationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateRegistration(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DomainsRestTransport",) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/types/__init__.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/types/__init__.py new file mode 100644 index 000000000000..1afe08dc5219 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/types/__init__.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .domains import ( + AuthorizationCode, + ConfigureContactSettingsRequest, + ConfigureDnsSettingsRequest, + ConfigureManagementSettingsRequest, + ContactNotice, + ContactPrivacy, + ContactSettings, + DeleteRegistrationRequest, + DnsSettings, + DomainNotice, + ExportRegistrationRequest, + GetRegistrationRequest, + ListRegistrationsRequest, + ListRegistrationsResponse, + ManagementSettings, + OperationMetadata, + RegisterDomainRequest, + RegisterParameters, + Registration, + ResetAuthorizationCodeRequest, + RetrieveAuthorizationCodeRequest, + RetrieveRegisterParametersRequest, + RetrieveRegisterParametersResponse, + RetrieveTransferParametersRequest, + RetrieveTransferParametersResponse, + SearchDomainsRequest, + SearchDomainsResponse, + TransferDomainRequest, + TransferLockState, + TransferParameters, + UpdateRegistrationRequest, +) + +__all__ = ( + "AuthorizationCode", + "ConfigureContactSettingsRequest", + "ConfigureDnsSettingsRequest", + "ConfigureManagementSettingsRequest", + "ContactSettings", + "DeleteRegistrationRequest", + "DnsSettings", + "ExportRegistrationRequest", + "GetRegistrationRequest", + "ListRegistrationsRequest", + "ListRegistrationsResponse", + "ManagementSettings", + "OperationMetadata", + "RegisterDomainRequest", + "RegisterParameters", + "Registration", + "ResetAuthorizationCodeRequest", + "RetrieveAuthorizationCodeRequest", + "RetrieveRegisterParametersRequest", + "RetrieveRegisterParametersResponse", + "RetrieveTransferParametersRequest", + "RetrieveTransferParametersResponse", + "SearchDomainsRequest", + "SearchDomainsResponse", + "TransferDomainRequest", + "TransferParameters", + "UpdateRegistrationRequest", + "ContactNotice", + "ContactPrivacy", + "DomainNotice", + "TransferLockState", +) diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/types/domains.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/types/domains.py new file mode 100644 index 
000000000000..bbcb82916920 --- /dev/null +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/types/domains.py @@ -0,0 +1,1526 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import money_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.domains.v1beta1", + manifest={ + "ContactPrivacy", + "DomainNotice", + "ContactNotice", + "TransferLockState", + "Registration", + "ManagementSettings", + "DnsSettings", + "ContactSettings", + "SearchDomainsRequest", + "SearchDomainsResponse", + "RetrieveRegisterParametersRequest", + "RetrieveRegisterParametersResponse", + "RegisterDomainRequest", + "RetrieveTransferParametersRequest", + "RetrieveTransferParametersResponse", + "TransferDomainRequest", + "ListRegistrationsRequest", + "ListRegistrationsResponse", + "GetRegistrationRequest", + "UpdateRegistrationRequest", + "ConfigureManagementSettingsRequest", + "ConfigureDnsSettingsRequest", + "ConfigureContactSettingsRequest", + "ExportRegistrationRequest", + "DeleteRegistrationRequest", + "RetrieveAuthorizationCodeRequest", + "ResetAuthorizationCodeRequest", + "RegisterParameters", + "TransferParameters", + "AuthorizationCode", + "OperationMetadata", + }, +) + + +class ContactPrivacy(proto.Enum): + r"""Defines a set of possible contact privacy settings for a + ``Registration``. + + `ICANN `__ maintains the WHOIS database, a + publicly accessible mapping from domain name to contact information, + and requires that each domain name have an entry. Choose from these + options to control how much information in your ``ContactSettings`` + is published. + + Values: + CONTACT_PRIVACY_UNSPECIFIED (0): + The contact privacy settings are undefined. + PUBLIC_CONTACT_DATA (1): + All the data from ``ContactSettings`` is publicly available. + When setting this option, you must also provide a + ``PUBLIC_CONTACT_DATA_ACKNOWLEDGEMENT`` in the + ``contact_notices`` field of the request. + PRIVATE_CONTACT_DATA (2): + None of the data from ``ContactSettings`` is publicly + available. Instead, proxy contact data is published for your + domain. Email sent to the proxy email address is forwarded + to the registrant's email address. Cloud Domains provides + this privacy proxy service at no additional cost. + REDACTED_CONTACT_DATA (3): + Some data from ``ContactSettings`` is publicly available. + The actual information redacted depends on the domain. For + details, see `the registration privacy + article `__. + """ + CONTACT_PRIVACY_UNSPECIFIED = 0 + PUBLIC_CONTACT_DATA = 1 + PRIVATE_CONTACT_DATA = 2 + REDACTED_CONTACT_DATA = 3 + + +class DomainNotice(proto.Enum): + r"""Notices about special properties of certain domains. 
+ + Values: + DOMAIN_NOTICE_UNSPECIFIED (0): + The notice is undefined. + HSTS_PRELOADED (1): + Indicates that the domain is preloaded on the HTTP Strict + Transport Security list in browsers. Serving a website on + such domain requires an SSL certificate. For details, see + `how to get an SSL + certificate `__. + """ + DOMAIN_NOTICE_UNSPECIFIED = 0 + HSTS_PRELOADED = 1 + + +class ContactNotice(proto.Enum): + r"""Notices related to contact information. + + Values: + CONTACT_NOTICE_UNSPECIFIED (0): + The notice is undefined. + PUBLIC_CONTACT_DATA_ACKNOWLEDGEMENT (1): + Required when setting the ``privacy`` field of + ``ContactSettings`` to ``PUBLIC_CONTACT_DATA``, which + exposes contact data publicly. + """ + CONTACT_NOTICE_UNSPECIFIED = 0 + PUBLIC_CONTACT_DATA_ACKNOWLEDGEMENT = 1 + + +class TransferLockState(proto.Enum): + r"""Possible states of a ``Registration``'s transfer lock. + + Values: + TRANSFER_LOCK_STATE_UNSPECIFIED (0): + The state is unspecified. + UNLOCKED (1): + The domain is unlocked and can be transferred + to another registrar. + LOCKED (2): + The domain is locked and cannot be + transferred to another registrar. + """ + TRANSFER_LOCK_STATE_UNSPECIFIED = 0 + UNLOCKED = 1 + LOCKED = 2 + + +class Registration(proto.Message): + r"""The ``Registration`` resource facilitates managing and configuring + domain name registrations. + + There are several ways to create a new ``Registration`` resource: + + To create a new ``Registration`` resource, find a suitable domain + name by calling the ``SearchDomains`` method with a query to see + available domain name options. After choosing a name, call + ``RetrieveRegisterParameters`` to ensure availability and obtain + information like pricing, which is needed to build a call to + ``RegisterDomain``. + + Another way to create a new ``Registration`` is to transfer an + existing domain from another registrar. First, go to the current + registrar to unlock the domain for transfer and retrieve the + domain's transfer authorization code. Then call + ``RetrieveTransferParameters`` to confirm that the domain is + unlocked and to get values needed to build a call to + ``TransferDomain``. + + Attributes: + name (str): + Output only. Name of the ``Registration`` resource, in the + format + ``projects/*/locations/*/registrations/``. + domain_name (str): + Required. Immutable. The domain name. Unicode + domain names must be expressed in Punycode + format. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the ``Registration`` + resource. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The expiration timestamp of the + ``Registration``. + state (google.cloud.domains_v1beta1.types.Registration.State): + Output only. The state of the ``Registration`` + issues (MutableSequence[google.cloud.domains_v1beta1.types.Registration.Issue]): + Output only. The set of issues with the ``Registration`` + that require attention. + labels (MutableMapping[str, str]): + Set of labels associated with the ``Registration``. + management_settings (google.cloud.domains_v1beta1.types.ManagementSettings): + Settings for management of the ``Registration``, including + renewal, billing, and transfer. You cannot update these with + the ``UpdateRegistration`` method. To update these settings, + use the ``ConfigureManagementSettings`` method. + dns_settings (google.cloud.domains_v1beta1.types.DnsSettings): + Settings controlling the DNS configuration of the + ``Registration``. 
You cannot update these with the + ``UpdateRegistration`` method. To update these settings, use + the ``ConfigureDnsSettings`` method. + contact_settings (google.cloud.domains_v1beta1.types.ContactSettings): + Required. Settings for contact information linked to the + ``Registration``. You cannot update these with the + ``UpdateRegistration`` method. To update these settings, use + the ``ConfigureContactSettings`` method. + pending_contact_settings (google.cloud.domains_v1beta1.types.ContactSettings): + Output only. Pending contact settings for the + ``Registration``. Updates to the ``contact_settings`` field + that change its ``registrant_contact`` or ``privacy`` fields + require email confirmation by the ``registrant_contact`` + before taking effect. This field is set only if there are + pending updates to the ``contact_settings`` that have not + been confirmed. To confirm the changes, the + ``registrant_contact`` must follow the instructions in the + email they receive. + supported_privacy (MutableSequence[google.cloud.domains_v1beta1.types.ContactPrivacy]): + Output only. Set of options for the + ``contact_settings.privacy`` field that this + ``Registration`` supports. + """ + + class State(proto.Enum): + r"""Possible states of a ``Registration``. + + Values: + STATE_UNSPECIFIED (0): + The state is undefined. + REGISTRATION_PENDING (1): + The domain is being registered. + REGISTRATION_FAILED (2): + The domain registration failed. You can + delete resources in this state to allow + registration to be retried. + TRANSFER_PENDING (3): + The domain is being transferred from another + registrar to Cloud Domains. + TRANSFER_FAILED (4): + The attempt to transfer the domain from + another registrar to Cloud Domains failed. You + can delete resources in this state and retry the + transfer. + ACTIVE (6): + The domain is registered and operational. The + domain renews automatically as long as it + remains in this state. + SUSPENDED (7): + The domain is suspended and inoperative. For more details, + see the ``issues`` field. + EXPORTED (8): + The domain is no longer managed with Cloud Domains. It may + have been transferred to another registrar or exported for + management in `Google Domains `__. + You can no longer update it with this API, and information + shown about it may be stale. Domains in this state are not + automatically renewed by Cloud Domains. + """ + STATE_UNSPECIFIED = 0 + REGISTRATION_PENDING = 1 + REGISTRATION_FAILED = 2 + TRANSFER_PENDING = 3 + TRANSFER_FAILED = 4 + ACTIVE = 6 + SUSPENDED = 7 + EXPORTED = 8 + + class Issue(proto.Enum): + r"""Possible issues with a ``Registration`` that require attention. + + Values: + ISSUE_UNSPECIFIED (0): + The issue is undefined. + CONTACT_SUPPORT (1): + Contact the Cloud Support team to resolve a + problem with this domain. + UNVERIFIED_EMAIL (2): + `ICANN `__ requires verification of the + email address in the ``Registration``'s + ``contact_settings.registrant_contact`` field. To verify the + email address, follow the instructions in the email the + ``registrant_contact`` receives following registration. If + you do not complete email verification within 15 days of + registration, the domain is suspended. To resend the + verification email, call ConfigureContactSettings and + provide the current ``registrant_contact.email``. 
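+
+ As an illustrative sketch only (the resource name is hypothetical and a ``DomainsClient`` is assumed to exist), re-sending the verification email amounts to re-submitting the current registrant contact::
+
+     from google.protobuf import field_mask_pb2
+
+     registration = client.get_registration(
+         request={"name": "projects/my-project/locations/global/registrations/example-domain.com"}
+     )
+     operation = client.configure_contact_settings(
+         request={
+             "registration": registration.name,
+             "contact_settings": registration.contact_settings,
+             "update_mask": field_mask_pb2.FieldMask(paths=["registrant_contact"]),
+         }
+     )
+     operation.result()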
+ """ + ISSUE_UNSPECIFIED = 0 + CONTACT_SUPPORT = 1 + UNVERIFIED_EMAIL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + domain_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + issues: MutableSequence[Issue] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=Issue, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + management_settings: "ManagementSettings" = proto.Field( + proto.MESSAGE, + number=10, + message="ManagementSettings", + ) + dns_settings: "DnsSettings" = proto.Field( + proto.MESSAGE, + number=11, + message="DnsSettings", + ) + contact_settings: "ContactSettings" = proto.Field( + proto.MESSAGE, + number=12, + message="ContactSettings", + ) + pending_contact_settings: "ContactSettings" = proto.Field( + proto.MESSAGE, + number=13, + message="ContactSettings", + ) + supported_privacy: MutableSequence["ContactPrivacy"] = proto.RepeatedField( + proto.ENUM, + number=14, + enum="ContactPrivacy", + ) + + +class ManagementSettings(proto.Message): + r"""Defines renewal, billing, and transfer settings for a + ``Registration``. + + Attributes: + renewal_method (google.cloud.domains_v1beta1.types.ManagementSettings.RenewalMethod): + Output only. The renewal method for this ``Registration``. + transfer_lock_state (google.cloud.domains_v1beta1.types.TransferLockState): + Controls whether the domain can be + transferred to another registrar. + """ + + class RenewalMethod(proto.Enum): + r"""Defines how the ``Registration`` is renewed. + + Values: + RENEWAL_METHOD_UNSPECIFIED (0): + The renewal method is undefined. + AUTOMATIC_RENEWAL (1): + The domain is automatically renewed each year . + + To disable automatic renewals, delete the resource by + calling ``DeleteRegistration`` or export it by calling + ``ExportRegistration``. + MANUAL_RENEWAL (2): + The domain must be explicitly renewed each year before its + ``expire_time``. This option is only available when the + ``Registration`` is in state ``EXPORTED``. + + To manage the domain's current billing and renewal settings, + go to `Google Domains `__. + """ + RENEWAL_METHOD_UNSPECIFIED = 0 + AUTOMATIC_RENEWAL = 1 + MANUAL_RENEWAL = 2 + + renewal_method: RenewalMethod = proto.Field( + proto.ENUM, + number=3, + enum=RenewalMethod, + ) + transfer_lock_state: "TransferLockState" = proto.Field( + proto.ENUM, + number=4, + enum="TransferLockState", + ) + + +class DnsSettings(proto.Message): + r"""Defines the DNS configuration of a ``Registration``, including name + servers, DNSSEC, and glue records. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + custom_dns (google.cloud.domains_v1beta1.types.DnsSettings.CustomDns): + An arbitrary DNS provider identified by its + name servers. + + This field is a member of `oneof`_ ``dns_provider``. 
+ google_domains_dns (google.cloud.domains_v1beta1.types.DnsSettings.GoogleDomainsDns): + The free DNS zone provided by `Google + Domains `__. + + This field is a member of `oneof`_ ``dns_provider``. + glue_records (MutableSequence[google.cloud.domains_v1beta1.types.DnsSettings.GlueRecord]): + The list of glue records for this ``Registration``. Commonly + empty. + """ + + class DsState(proto.Enum): + r"""The publication state of DS records for a ``Registration``. + + Values: + DS_STATE_UNSPECIFIED (0): + DS state is unspecified. + DS_RECORDS_UNPUBLISHED (1): + DNSSEC is disabled for this domain. No DS + records for this domain are published in the + parent DNS zone. + DS_RECORDS_PUBLISHED (2): + DNSSEC is enabled for this domain. Appropriate DS records + for this domain are published in the parent DNS zone. This + option is valid only if the DNS zone referenced in the + ``Registration``'s ``dns_provider`` field is already + DNSSEC-signed. + """ + DS_STATE_UNSPECIFIED = 0 + DS_RECORDS_UNPUBLISHED = 1 + DS_RECORDS_PUBLISHED = 2 + + class CustomDns(proto.Message): + r"""Configuration for an arbitrary DNS provider. + + Attributes: + name_servers (MutableSequence[str]): + Required. A list of name servers that store + the DNS zone for this domain. Each name server + is a domain name, with Unicode domain names + expressed in Punycode format. + ds_records (MutableSequence[google.cloud.domains_v1beta1.types.DnsSettings.DsRecord]): + The list of DS records for this domain, which + are used to enable DNSSEC. The domain's DNS + provider can provide the values to set here. If + this field is empty, DNSSEC is disabled. + """ + + name_servers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + ds_records: MutableSequence["DnsSettings.DsRecord"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="DnsSettings.DsRecord", + ) + + class GoogleDomainsDns(proto.Message): + r"""Configuration for using the free DNS zone provided by Google Domains + as a ``Registration``'s ``dns_provider``. You cannot configure the + DNS zone itself using the API. To configure the DNS zone, go to + `Google Domains `__. + + Attributes: + name_servers (MutableSequence[str]): + Output only. A list of name servers that + store the DNS zone for this domain. Each name + server is a domain name, with Unicode domain + names expressed in Punycode format. This field + is automatically populated with the name servers + assigned to the Google Domains DNS zone. + ds_state (google.cloud.domains_v1beta1.types.DnsSettings.DsState): + Required. The state of DS records for this + domain. Used to enable or disable automatic + DNSSEC. + ds_records (MutableSequence[google.cloud.domains_v1beta1.types.DnsSettings.DsRecord]): + Output only. The list of DS records published for this + domain. The list is automatically populated when + ``ds_state`` is ``DS_RECORDS_PUBLISHED``, otherwise it + remains empty. + """ + + name_servers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + ds_state: "DnsSettings.DsState" = proto.Field( + proto.ENUM, + number=2, + enum="DnsSettings.DsState", + ) + ds_records: MutableSequence["DnsSettings.DsRecord"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DnsSettings.DsRecord", + ) + + class DsRecord(proto.Message): + r"""Defines a Delegation Signer (DS) record, which is needed to + enable DNSSEC for a domain. It contains a digest (hash) of a + DNSKEY record that must be present in the domain's DNS zone. 
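+
+ A minimal construction sketch (the key tag, digest, and name servers below are placeholder values, not a working DNSSEC key)::
+
+     from google.cloud import domains_v1beta1
+
+     ds_record = domains_v1beta1.DnsSettings.DsRecord(
+         key_tag=12345,
+         algorithm=domains_v1beta1.DnsSettings.DsRecord.Algorithm.ECDSAP256SHA256,
+         digest_type=domains_v1beta1.DnsSettings.DsRecord.DigestType.SHA256,
+         digest="0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
+     )
+     dns_settings = domains_v1beta1.DnsSettings(
+         custom_dns=domains_v1beta1.DnsSettings.CustomDns(
+             name_servers=["ns1.example-dns.com", "ns2.example-dns.com"],
+             ds_records=[ds_record],
+         )
+     )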
+ + Attributes: + key_tag (int): + The key tag of the record. Must be set in + range 0 -- 65535. + algorithm (google.cloud.domains_v1beta1.types.DnsSettings.DsRecord.Algorithm): + The algorithm used to generate the referenced + DNSKEY. + digest_type (google.cloud.domains_v1beta1.types.DnsSettings.DsRecord.DigestType): + The hash function used to generate the digest + of the referenced DNSKEY. + digest (str): + The digest generated from the referenced + DNSKEY. + """ + + class Algorithm(proto.Enum): + r"""List of algorithms used to create a DNSKEY. Certain + algorithms are not supported for particular domains. + + Values: + ALGORITHM_UNSPECIFIED (0): + The algorithm is unspecified. + RSAMD5 (1): + RSA/MD5. Cannot be used for new deployments. + DH (2): + Diffie-Hellman. Cannot be used for new + deployments. + DSA (3): + DSA/SHA1. Not recommended for new + deployments. + ECC (4): + ECC. Not recommended for new deployments. + RSASHA1 (5): + RSA/SHA-1. Not recommended for new + deployments. + DSANSEC3SHA1 (6): + DSA-NSEC3-SHA1. Not recommended for new + deployments. + RSASHA1NSEC3SHA1 (7): + RSA/SHA1-NSEC3-SHA1. Not recommended for new + deployments. + RSASHA256 (8): + RSA/SHA-256. + RSASHA512 (10): + RSA/SHA-512. + ECCGOST (12): + GOST R 34.10-2001. + ECDSAP256SHA256 (13): + ECDSA Curve P-256 with SHA-256. + ECDSAP384SHA384 (14): + ECDSA Curve P-384 with SHA-384. + ED25519 (15): + Ed25519. + ED448 (16): + Ed448. + INDIRECT (252): + Reserved for Indirect Keys. Cannot be used + for new deployments. + PRIVATEDNS (253): + Private algorithm. Cannot be used for new + deployments. + PRIVATEOID (254): + Private algorithm OID. Cannot be used for new + deployments. + """ + ALGORITHM_UNSPECIFIED = 0 + RSAMD5 = 1 + DH = 2 + DSA = 3 + ECC = 4 + RSASHA1 = 5 + DSANSEC3SHA1 = 6 + RSASHA1NSEC3SHA1 = 7 + RSASHA256 = 8 + RSASHA512 = 10 + ECCGOST = 12 + ECDSAP256SHA256 = 13 + ECDSAP384SHA384 = 14 + ED25519 = 15 + ED448 = 16 + INDIRECT = 252 + PRIVATEDNS = 253 + PRIVATEOID = 254 + + class DigestType(proto.Enum): + r"""List of hash functions that may have been used to generate a + digest of a DNSKEY. + + Values: + DIGEST_TYPE_UNSPECIFIED (0): + The DigestType is unspecified. + SHA1 (1): + SHA-1. Not recommended for new deployments. + SHA256 (2): + SHA-256. + GOST3411 (3): + GOST R 34.11-94. + SHA384 (4): + SHA-384. + """ + DIGEST_TYPE_UNSPECIFIED = 0 + SHA1 = 1 + SHA256 = 2 + GOST3411 = 3 + SHA384 = 4 + + key_tag: int = proto.Field( + proto.INT32, + number=1, + ) + algorithm: "DnsSettings.DsRecord.Algorithm" = proto.Field( + proto.ENUM, + number=2, + enum="DnsSettings.DsRecord.Algorithm", + ) + digest_type: "DnsSettings.DsRecord.DigestType" = proto.Field( + proto.ENUM, + number=3, + enum="DnsSettings.DsRecord.DigestType", + ) + digest: str = proto.Field( + proto.STRING, + number=4, + ) + + class GlueRecord(proto.Message): + r"""Defines a host on your domain that is a DNS name server for your + domain and/or other domains. Glue records are a way of making the IP + address of a name server known, even when it serves DNS queries for + its parent domain. For example, when ``ns.example.com`` is a name + server for ``example.com``, the host ``ns.example.com`` must have a + glue record to break the circular DNS reference. + + Attributes: + host_name (str): + Required. Domain name of the host in Punycode + format. + ipv4_addresses (MutableSequence[str]): + List of IPv4 addresses corresponding to this host in the + standard decimal format (e.g. ``198.51.100.1``). 
At least + one of ``ipv4_addresses`` and ``ipv6_addresses`` must be set. + ipv6_addresses (MutableSequence[str]): + List of IPv6 addresses corresponding to this host in the + standard hexadecimal format (e.g. ``2001:db8::``). At least + one of ``ipv4_addresses`` and ``ipv6_addresses`` must be set. + """ + + host_name: str = proto.Field( + proto.STRING, + number=1, + ) + ipv4_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + ipv6_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + custom_dns: CustomDns = proto.Field( + proto.MESSAGE, + number=1, + oneof="dns_provider", + message=CustomDns, + ) + google_domains_dns: GoogleDomainsDns = proto.Field( + proto.MESSAGE, + number=2, + oneof="dns_provider", + message=GoogleDomainsDns, + ) + glue_records: MutableSequence[GlueRecord] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=GlueRecord, + ) + + +class ContactSettings(proto.Message): + r"""Defines the contact information associated with a ``Registration``. + + `ICANN `__ requires all domain names to have + associated contact information. The ``registrant_contact`` is + considered the domain's legal owner, and often the other contacts + are identical. + + Attributes: + privacy (google.cloud.domains_v1beta1.types.ContactPrivacy): + Required. Privacy setting for the contacts associated with + the ``Registration``. + registrant_contact (google.cloud.domains_v1beta1.types.ContactSettings.Contact): + Required. The registrant contact for the ``Registration``. + + *Caution: Anyone with access to this email address, phone + number, and/or postal address can take control of the + domain.* + + *Warning: For new ``Registration``\ s, the registrant + receives an email confirmation that they must complete + within 15 days to avoid domain suspension.* + admin_contact (google.cloud.domains_v1beta1.types.ContactSettings.Contact): + Required. The administrative contact for the + ``Registration``. + technical_contact (google.cloud.domains_v1beta1.types.ContactSettings.Contact): + Required. The technical contact for the ``Registration``. + """ + + class Contact(proto.Message): + r"""Details required for a contact associated with a ``Registration``. + + Attributes: + postal_address (google.type.postal_address_pb2.PostalAddress): + Required. Postal address of the contact. + email (str): + Required. Email address of the contact. + phone_number (str): + Required. Phone number of the contact in international + format. For example, ``"+1-800-555-0123"``. + fax_number (str): + Fax number of the contact in international format. For + example, ``"+1-800-555-0123"``. + """ + + postal_address: postal_address_pb2.PostalAddress = proto.Field( + proto.MESSAGE, + number=1, + message=postal_address_pb2.PostalAddress, + ) + email: str = proto.Field( + proto.STRING, + number=2, + ) + phone_number: str = proto.Field( + proto.STRING, + number=3, + ) + fax_number: str = proto.Field( + proto.STRING, + number=4, + ) + + privacy: "ContactPrivacy" = proto.Field( + proto.ENUM, + number=1, + enum="ContactPrivacy", + ) + registrant_contact: Contact = proto.Field( + proto.MESSAGE, + number=2, + message=Contact, + ) + admin_contact: Contact = proto.Field( + proto.MESSAGE, + number=3, + message=Contact, + ) + technical_contact: Contact = proto.Field( + proto.MESSAGE, + number=4, + message=Contact, + ) + + +class SearchDomainsRequest(proto.Message): + r"""Request for the ``SearchDomains`` method. + + Attributes: + query (str): + Required.
String used to search for available + domain names. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + """ + + query: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchDomainsResponse(proto.Message): + r"""Response for the ``SearchDomains`` method. + + Attributes: + register_parameters (MutableSequence[google.cloud.domains_v1beta1.types.RegisterParameters]): + Results of the domain name search. + """ + + register_parameters: MutableSequence["RegisterParameters"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RegisterParameters", + ) + + +class RetrieveRegisterParametersRequest(proto.Message): + r"""Request for the ``RetrieveRegisterParameters`` method. + + Attributes: + domain_name (str): + Required. The domain name. Unicode domain + names must be expressed in Punycode format. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + """ + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RetrieveRegisterParametersResponse(proto.Message): + r"""Response for the ``RetrieveRegisterParameters`` method. + + Attributes: + register_parameters (google.cloud.domains_v1beta1.types.RegisterParameters): + Parameters to use when calling the ``RegisterDomain`` + method. + """ + + register_parameters: "RegisterParameters" = proto.Field( + proto.MESSAGE, + number=1, + message="RegisterParameters", + ) + + +class RegisterDomainRequest(proto.Message): + r"""Request for the ``RegisterDomain`` method. + + Attributes: + parent (str): + Required. The parent resource of the ``Registration``. Must + be in the format ``projects/*/locations/*``. + registration (google.cloud.domains_v1beta1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + domain_notices (MutableSequence[google.cloud.domains_v1beta1.types.DomainNotice]): + The list of domain notices that you acknowledge. Call + ``RetrieveRegisterParameters`` to see the notices that need + acknowledgement. + contact_notices (MutableSequence[google.cloud.domains_v1beta1.types.ContactNotice]): + The list of contact notices that the caller acknowledges. + The notices needed here depend on the values specified in + ``registration.contact_settings``. + yearly_price (google.type.money_pb2.Money): + Required. Yearly price to register or renew + the domain. Obtain this value by calling + ``RetrieveRegisterParameters`` or ``SearchDomains``. + validate_only (bool): + When true, only validation is performed, without actually + registering the domain. Follows: + https://cloud.google.com/apis/design/design_patterns#request_validation + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + registration: "Registration" = proto.Field( + proto.MESSAGE, + number=2, + message="Registration", + ) + domain_notices: MutableSequence["DomainNotice"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="DomainNotice", + ) + contact_notices: MutableSequence["ContactNotice"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="ContactNotice", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=5, + message=money_pb2.Money, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class RetrieveTransferParametersRequest(proto.Message): + r"""Request for the ``RetrieveTransferParameters`` method.
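+
+ A sketch of the transfer flow described under ``Registration`` (the project, domain, and authorization code are hypothetical, and a real request must also carry fully populated contact settings)::
+
+     from google.cloud import domains_v1beta1
+
+     client = domains_v1beta1.DomainsClient()
+     location = "projects/my-project/locations/global"
+
+     params = client.retrieve_transfer_parameters(
+         request={"domain_name": "example-domain.com", "location": location}
+     ).transfer_parameters
+     assert params.transfer_lock_state == domains_v1beta1.TransferLockState.UNLOCKED
+
+     operation = client.transfer_domain(
+         request={
+             "parent": location,
+             "registration": domains_v1beta1.Registration(domain_name=params.domain_name),
+             "yearly_price": params.yearly_price,
+             "authorization_code": domains_v1beta1.AuthorizationCode(code="hypothetical-code"),
+         }
+     )
+     registration = operation.result()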
+ + Attributes: + domain_name (str): + Required. The domain name. Unicode domain + names must be expressed in Punycode format. + location (str): + Required. The location. Must be in the format + ``projects/*/locations/*``. + """ + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RetrieveTransferParametersResponse(proto.Message): + r"""Response for the ``RetrieveTransferParameters`` method. + + Attributes: + transfer_parameters (google.cloud.domains_v1beta1.types.TransferParameters): + Parameters to use when calling the ``TransferDomain`` + method. + """ + + transfer_parameters: "TransferParameters" = proto.Field( + proto.MESSAGE, + number=1, + message="TransferParameters", + ) + + +class TransferDomainRequest(proto.Message): + r"""Request for the ``TransferDomain`` method. + + Attributes: + parent (str): + Required. The parent resource of the ``Registration``. Must + be in the format ``projects/*/locations/*``. + registration (google.cloud.domains_v1beta1.types.Registration): + Required. The complete ``Registration`` resource to be + created. + + You can leave ``registration.dns_settings`` unset to import + the domain's current DNS configuration from its current + registrar. Use this option only if you are sure that the + domain's current DNS service does not cease upon transfer, + as is often the case for DNS services provided for free by + the registrar. + contact_notices (MutableSequence[google.cloud.domains_v1beta1.types.ContactNotice]): + The list of contact notices that you acknowledge. The + notices needed here depend on the values specified in + ``registration.contact_settings``. + yearly_price (google.type.money_pb2.Money): + Required. Acknowledgement of the price to transfer or renew + the domain for one year. Call ``RetrieveTransferParameters`` + to obtain the price, which you must acknowledge. + authorization_code (google.cloud.domains_v1beta1.types.AuthorizationCode): + The domain's transfer authorization code. You + can obtain this from the domain's current + registrar. + validate_only (bool): + Validate the request without actually + transferring the domain. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + registration: "Registration" = proto.Field( + proto.MESSAGE, + number=2, + message="Registration", + ) + contact_notices: MutableSequence["ContactNotice"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="ContactNotice", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=4, + message=money_pb2.Money, + ) + authorization_code: "AuthorizationCode" = proto.Field( + proto.MESSAGE, + number=5, + message="AuthorizationCode", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListRegistrationsRequest(proto.Message): + r"""Request for the ``ListRegistrations`` method. + + Attributes: + parent (str): + Required. The project and location from which to list + ``Registration``\ s, specified in the format + ``projects/*/locations/*``. + page_size (int): + Maximum number of results to return. + page_token (str): + When set to the ``next_page_token`` from a prior response, + provides the next page of results. + filter (str): + Filter expression to restrict the ``Registration``\ s + returned. + + The expression must specify the field name, a comparison + operator, and the value that you want to use for filtering. + The value must be a string, a number, a boolean, or an enum + value. 
The comparison operator should be one of =, !=, >, <, + >=, <=, or : for prefix or wildcard matches. + + For example, to filter to a specific domain name, use an + expression like ``domainName="example.com"``. You can also + check for the existence of a field; for example, to find + domains using custom DNS settings, use an expression like + ``dnsSettings.customDns:*``. + + You can also create compound filters by combining + expressions with the ``AND`` and ``OR`` operators. For + example, to find domains that are suspended or have specific + issues flagged, use an expression like + ``(state=SUSPENDED) OR (issue:*)``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListRegistrationsResponse(proto.Message): + r"""Response for the ``ListRegistrations`` method. + + Attributes: + registrations (MutableSequence[google.cloud.domains_v1beta1.types.Registration]): + A list of ``Registration``\ s. + next_page_token (str): + When present, there are more results to retrieve. Set + ``page_token`` to this value on a subsequent call to get the + next page of results. + """ + + @property + def raw_page(self): + return self + + registrations: MutableSequence["Registration"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Registration", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetRegistrationRequest(proto.Message): + r"""Request for the ``GetRegistration`` method. + + Attributes: + name (str): + Required. The name of the ``Registration`` to get, in the + format ``projects/*/locations/*/registrations/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateRegistrationRequest(proto.Message): + r"""Request for the ``UpdateRegistration`` method. + + Attributes: + registration (google.cloud.domains_v1beta1.types.Registration): + Fields of the ``Registration`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the labels + are being updated, the ``update_mask`` is ``"labels"``. + """ + + registration: "Registration" = proto.Field( + proto.MESSAGE, + number=1, + message="Registration", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ConfigureManagementSettingsRequest(proto.Message): + r"""Request for the ``ConfigureManagementSettings`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose management + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + management_settings (google.cloud.domains_v1beta1.types.ManagementSettings): + Fields of the ``ManagementSettings`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the transfer + lock is being updated, the ``update_mask`` is + ``"transfer_lock_state"``. 
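+
+ As a sketch (the registration name is hypothetical), unlocking the transfer lock ahead of a transfer to another registrar::
+
+     from google.cloud import domains_v1beta1
+     from google.protobuf import field_mask_pb2
+
+     client = domains_v1beta1.DomainsClient()
+     operation = client.configure_management_settings(
+         request={
+             "registration": "projects/my-project/locations/global/registrations/example-domain.com",
+             "management_settings": domains_v1beta1.ManagementSettings(
+                 transfer_lock_state=domains_v1beta1.TransferLockState.UNLOCKED
+             ),
+             "update_mask": field_mask_pb2.FieldMask(paths=["transfer_lock_state"]),
+         }
+     )
+     operation.result()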
+ """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + management_settings: "ManagementSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="ManagementSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class ConfigureDnsSettingsRequest(proto.Message): + r"""Request for the ``ConfigureDnsSettings`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose DNS + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + dns_settings (google.cloud.domains_v1beta1.types.DnsSettings): + Fields of the ``DnsSettings`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the name + servers are being updated for an existing Custom DNS + configuration, the ``update_mask`` is + ``"custom_dns.name_servers"``. + + When changing the DNS provider from one type to another, + pass the new provider's field name as part of the field + mask. For example, when changing from a Google Domains DNS + configuration to a Custom DNS configuration, the + ``update_mask`` is ``"custom_dns"``. // + validate_only (bool): + Validate the request without actually + updating the DNS settings. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + dns_settings: "DnsSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="DnsSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ConfigureContactSettingsRequest(proto.Message): + r"""Request for the ``ConfigureContactSettings`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose contact + settings are being updated, in the format + ``projects/*/locations/*/registrations/*``. + contact_settings (google.cloud.domains_v1beta1.types.ContactSettings): + Fields of the ``ContactSettings`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The field mask describing which fields to update + as a comma-separated list. For example, if only the + registrant contact is being updated, the ``update_mask`` is + ``"registrant_contact"``. + contact_notices (MutableSequence[google.cloud.domains_v1beta1.types.ContactNotice]): + The list of contact notices that the caller acknowledges. + The notices needed here depend on the values specified in + ``contact_settings``. + validate_only (bool): + Validate the request without actually + updating the contact settings. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + contact_settings: "ContactSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="ContactSettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + contact_notices: MutableSequence["ContactNotice"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="ContactNotice", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ExportRegistrationRequest(proto.Message): + r"""Request for the ``ExportRegistration`` method. + + Attributes: + name (str): + Required. The name of the ``Registration`` to export, in the + format ``projects/*/locations/*/registrations/*``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteRegistrationRequest(proto.Message): + r"""Request for the ``DeleteRegistration`` method. + + Attributes: + name (str): + Required. The name of the ``Registration`` to delete, in the + format ``projects/*/locations/*/registrations/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RetrieveAuthorizationCodeRequest(proto.Message): + r"""Request for the ``RetrieveAuthorizationCode`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being retrieved, in the format + ``projects/*/locations/*/registrations/*``. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ResetAuthorizationCodeRequest(proto.Message): + r"""Request for the ``ResetAuthorizationCode`` method. + + Attributes: + registration (str): + Required. The name of the ``Registration`` whose + authorization code is being reset, in the format + ``projects/*/locations/*/registrations/*``. + """ + + registration: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RegisterParameters(proto.Message): + r"""Parameters required to register a new domain. + + Attributes: + domain_name (str): + The domain name. Unicode domain names are + expressed in Punycode format. + availability (google.cloud.domains_v1beta1.types.RegisterParameters.Availability): + Indicates whether the domain is available for registration. + This value is accurate when obtained by calling + ``RetrieveRegisterParameters``, but is approximate when + obtained by calling ``SearchDomains``. + supported_privacy (MutableSequence[google.cloud.domains_v1beta1.types.ContactPrivacy]): + Contact privacy options that the domain + supports. + domain_notices (MutableSequence[google.cloud.domains_v1beta1.types.DomainNotice]): + Notices about special properties of the + domain. + yearly_price (google.type.money_pb2.Money): + Price to register or renew the domain for one + year. + """ + + class Availability(proto.Enum): + r"""Possible availability states of a domain name. + + Values: + AVAILABILITY_UNSPECIFIED (0): + The availability is unspecified. + AVAILABLE (1): + The domain is available for registration. + UNAVAILABLE (2): + The domain is not available for registration. + Generally this means it is already registered to + another party. + UNSUPPORTED (3): + The domain is not currently supported by + Cloud Domains, but may be available elsewhere. + UNKNOWN (4): + Cloud Domains is unable to determine domain + availability, generally due to system + maintenance at the domain name registry. + """ + AVAILABILITY_UNSPECIFIED = 0 + AVAILABLE = 1 + UNAVAILABLE = 2 + UNSUPPORTED = 3 + UNKNOWN = 4 + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + availability: Availability = proto.Field( + proto.ENUM, + number=2, + enum=Availability, + ) + supported_privacy: MutableSequence["ContactPrivacy"] = proto.RepeatedField( + proto.ENUM, + number=3, + enum="ContactPrivacy", + ) + domain_notices: MutableSequence["DomainNotice"] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="DomainNotice", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=5, + message=money_pb2.Money, + ) + + +class TransferParameters(proto.Message): + r"""Parameters required to transfer a domain from another + registrar. + + Attributes: + domain_name (str): + The domain name. Unicode domain names are + expressed in Punycode format. 
+ current_registrar (str): + The registrar that currently manages the + domain. + name_servers (MutableSequence[str]): + The name servers that currently store the + configuration of the domain. + transfer_lock_state (google.cloud.domains_v1beta1.types.TransferLockState): + Indicates whether the domain is protected by a transfer + lock. For a transfer to succeed, this must show + ``UNLOCKED``. To unlock a domain, go to its current + registrar. + supported_privacy (MutableSequence[google.cloud.domains_v1beta1.types.ContactPrivacy]): + Contact privacy options that the domain + supports. + yearly_price (google.type.money_pb2.Money): + Price to transfer or renew the domain for one + year. + """ + + domain_name: str = proto.Field( + proto.STRING, + number=1, + ) + current_registrar: str = proto.Field( + proto.STRING, + number=2, + ) + name_servers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + transfer_lock_state: "TransferLockState" = proto.Field( + proto.ENUM, + number=4, + enum="TransferLockState", + ) + supported_privacy: MutableSequence["ContactPrivacy"] = proto.RepeatedField( + proto.ENUM, + number=5, + enum="ContactPrivacy", + ) + yearly_price: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=6, + message=money_pb2.Money, + ) + + +class AuthorizationCode(proto.Message): + r"""Defines an authorization code. + + Attributes: + code (str): + The Authorization Code in ASCII. It can be + used to transfer the domain to or from another + registrar. + """ + + code: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. Output + only. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. + target (str): + Server-defined resource path for the target + of the operation. + verb (str): + Name of the verb executed by the operation. + status_detail (str): + Human-readable status of the operation, if + any. + api_version (str): + API version used to start the operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_detail: str = proto.Field( + proto.STRING, + number=5, + ) + api_version: str = proto.Field( + proto.STRING, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-domains/mypy.ini b/packages/google-cloud-domains/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-domains/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-domains/noxfile.py b/packages/google-cloud-domains/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-domains/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
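+ # For example, this session is typically invoked from the command line as
+ # `nox -s system`; it runs only when RUN_SYSTEM_TESTS is not set to
+ # "false" and a tests/system.py file or tests/system/ directory exists.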
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-domains/renovate.json b/packages/google-cloud-domains/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-domains/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-domains/scripts/decrypt-secrets.sh b/packages/google-cloud-domains/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-domains/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent the script from overwriting existing files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set; fall back to cloud-devrel-kokoro-resources.
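+ # For example (the project ID is hypothetical):
+ #   SECRET_MANAGER_PROJECT=my-secrets-project ./scripts/decrypt-secrets.sh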
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-domains/scripts/fixup_domains_v1_keywords.py b/packages/google-cloud-domains/scripts/fixup_domains_v1_keywords.py new file mode 100644 index 000000000000..39c812d2590d --- /dev/null +++ b/packages/google-cloud-domains/scripts/fixup_domains_v1_keywords.py @@ -0,0 +1,190 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class domainsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'configure_contact_settings': ('registration', 'update_mask', 'contact_settings', 'contact_notices', 'validate_only', ), + 'configure_dns_settings': ('registration', 'update_mask', 'dns_settings', 'validate_only', ), + 'configure_management_settings': ('registration', 'update_mask', 'management_settings', ), + 'delete_registration': ('name', ), + 'export_registration': ('name', ), + 'get_registration': ('name', ), + 'list_registrations': ('parent', 'page_size', 'page_token', 'filter', ), + 'register_domain': ('parent', 'registration', 'yearly_price', 'domain_notices', 'contact_notices', 'validate_only', ), + 'reset_authorization_code': ('registration', ), + 'retrieve_authorization_code': ('registration', ), + 'retrieve_register_parameters': ('domain_name', 'location', ), + 'retrieve_transfer_parameters': ('domain_name', 'location', ), + 'search_domains': ('query', 'location', ), + 'transfer_domain': ('parent', 'registration', 'yearly_price', 'contact_notices', 'authorization_code', 'validate_only', ), + 'update_registration': ('update_mask', 'registration', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. 
+ # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=domainsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the domains client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
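+
+ Example invocation (the directory names are hypothetical):
+     python fixup_domains_v1_keywords.py -d ./my_old_code -o ./my_fixed_code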
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-domains/scripts/fixup_domains_v1beta1_keywords.py b/packages/google-cloud-domains/scripts/fixup_domains_v1beta1_keywords.py new file mode 100644 index 000000000000..39c812d2590d --- /dev/null +++ b/packages/google-cloud-domains/scripts/fixup_domains_v1beta1_keywords.py @@ -0,0 +1,190 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class domainsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'configure_contact_settings': ('registration', 'update_mask', 'contact_settings', 'contact_notices', 'validate_only', ), + 'configure_dns_settings': ('registration', 'update_mask', 'dns_settings', 'validate_only', ), + 'configure_management_settings': ('registration', 'update_mask', 'management_settings', ), + 'delete_registration': ('name', ), + 'export_registration': ('name', ), + 'get_registration': ('name', ), + 'list_registrations': ('parent', 'page_size', 'page_token', 'filter', ), + 'register_domain': ('parent', 'registration', 'yearly_price', 'domain_notices', 'contact_notices', 'validate_only', ), + 'reset_authorization_code': ('registration', ), + 'retrieve_authorization_code': ('registration', ), + 'retrieve_register_parameters': ('domain_name', 'location', ), + 'retrieve_transfer_parameters': ('domain_name', 'location', ), + 'search_domains': ('query', 'location', ), + 'transfer_domain': ('parent', 'registration', 'yearly_price', 'contact_notices', 'authorization_code', 'validate_only', ), + 'update_registration': ('update_mask', 'registration', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=domainsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the domains client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-domains/scripts/readme-gen/readme_gen.py b/packages/google-cloud-domains/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..91b59676bfc7 --- /dev/null +++ b/packages/google-cloud-domains/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
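The v1beta1 fixup above is byte-for-byte identical to the v1 script (both diff headers record the same blob, 39c812d2590d); only the file name differs. fix_files can also be driven directly from Python rather than via argparse; a minimal sketch, assuming the script is importable and the preconditions from its docstring hold (the input directory exists, the output directory exists and is empty):

    import pathlib

    from fixup_domains_v1beta1_keywords import fix_files

    fix_files(pathlib.Path("src"), pathlib.Path("fixed_src"))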
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-domains/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-domains/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-domains/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. 
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+   https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+   https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+   https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-domains/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-domains/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000000..1446b94a5e3a
--- /dev/null
+++ b/packages/google-cloud-domains/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication set up. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+   https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-domains/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-domains/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-domains/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+   https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-cloud-domains/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-domains/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-cloud-domains/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+   .. code-block:: bash
+
+      $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+      https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+   .. code-block:: bash
+
+      $ virtualenv env
+      $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+   .. code-block:: bash
+
+      $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+..
_virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-domains/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-domains/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-domains/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-domains/setup.cfg b/packages/google-cloud-domains/setup.cfg new file mode 100644 index 000000000000..c3a2b39f6528 --- /dev/null +++ b/packages/google-cloud-domains/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-domains/setup.py b/packages/google-cloud-domains/setup.py new file mode 100644 index 000000000000..7a8fc1550e85 --- /dev/null +++ b/packages/google-cloud-domains/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
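The readme-gen tooling above is configuration-driven: a YAML file names the product, the setup sections to include (for example the auth and install_deps templates just added), and the samples to document, and readme_gen.py renders README.tmpl.rst with that config. A typical invocation, assuming a config file that lives next to the samples (the .rst.in name is illustrative):

    python scripts/readme-gen/readme_gen.py samples/snippets/README.rst.in

The rendered README.rst lands in the same directory as the source config unless --destination says otherwise.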
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-domains" + + +description = "Google Cloud Domains API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/domains/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-domains/testing/.gitignore b/packages/google-cloud-domains/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-domains/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-domains/testing/constraints-3.10.txt b/packages/google-cloud-domains/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-domains/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-domains/testing/constraints-3.11.txt b/packages/google-cloud-domains/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-domains/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
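+# A constraints file is consumed through pip's -c flag; for example
+# (illustrative command, run from the package root):
+#   pip install -e . -c testing/constraints-3.11.txt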
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-domains/testing/constraints-3.12.txt b/packages/google-cloud-domains/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-domains/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-domains/testing/constraints-3.7.txt b/packages/google-cloud-domains/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-domains/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-domains/testing/constraints-3.8.txt b/packages/google-cloud-domains/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-domains/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-domains/testing/constraints-3.9.txt b/packages/google-cloud-domains/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-domains/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-domains/tests/__init__.py b/packages/google-cloud-domains/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-domains/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-domains/tests/unit/__init__.py b/packages/google-cloud-domains/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-domains/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-domains/tests/unit/gapic/__init__.py b/packages/google-cloud-domains/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-domains/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/__init__.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py new file mode 100644 index 000000000000..acb47a8049e6 --- /dev/null +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py @@ -0,0 +1,9877 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
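The nearly 10,000-line unit suite that follows runs entirely against mocked transports, so it needs no credentials, project, or network access. A quick local pass, sketched (run from the repository root; test-only dependencies such as pytest and pytest-asyncio are assumed to be installed):

    pip install -e packages/google-cloud-domains
    pytest packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py -q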
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import money_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.domains_v1.services.domains import ( + DomainsAsyncClient, + DomainsClient, + pagers, + transports, +) +from google.cloud.domains_v1.types import domains + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
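+# (Illustrative: "domains.googleapis.com" comes back unchanged, while a
+# localhost-style default endpoint is swapped for "foo.googleapis.com" so
+# that the derived mTLS endpoint actually differs from the regular one.)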
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DomainsClient._get_default_mtls_endpoint(None) is None + assert DomainsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + DomainsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + DomainsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DomainsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert DomainsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DomainsClient, "grpc"), + (DomainsAsyncClient, "grpc_asyncio"), + (DomainsClient, "rest"), + ], +) +def test_domains_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "domains.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://domains.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DomainsGrpcTransport, "grpc"), + (transports.DomainsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DomainsRestTransport, "rest"), + ], +) +def test_domains_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DomainsClient, "grpc"), + (DomainsAsyncClient, "grpc_asyncio"), + (DomainsClient, "rest"), + ], +) +def test_domains_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
"domains.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://domains.googleapis.com" + ) + + +def test_domains_client_get_transport_class(): + transport = DomainsClient.get_transport_class() + available_transports = [ + transports.DomainsGrpcTransport, + transports.DomainsRestTransport, + ] + assert transport in available_transports + + transport = DomainsClient.get_transport_class("grpc") + assert transport == transports.DomainsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc"), + (DomainsAsyncClient, transports.DomainsGrpcAsyncIOTransport, "grpc_asyncio"), + (DomainsClient, transports.DomainsRestTransport, "rest"), + ], +) +@mock.patch.object( + DomainsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsClient) +) +@mock.patch.object( + DomainsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsAsyncClient) +) +def test_domains_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DomainsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DomainsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc", "true"), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DomainsClient, transports.DomainsGrpcTransport, "grpc", "false"), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (DomainsClient, transports.DomainsRestTransport, "rest", "true"), + (DomainsClient, transports.DomainsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + DomainsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsClient) +) +@mock.patch.object( + DomainsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsAsyncClient) +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_domains_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
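+    # (GOOGLE_API_USE_CLIENT_CERTIFICATE itself accepts only the strings
+    # "true" and "false"; other values raise ValueError, as the previous
+    # test checks.)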
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [DomainsClient, DomainsAsyncClient]) +@mock.patch.object( + DomainsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsClient) +) +@mock.patch.object( + DomainsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsAsyncClient) +) +def test_domains_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
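+    # (With certificates enabled, a user-supplied api_endpoint and
+    # client_cert_source are expected to pass through unchanged.)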
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc"), + (DomainsAsyncClient, transports.DomainsGrpcAsyncIOTransport, "grpc_asyncio"), + (DomainsClient, transports.DomainsRestTransport, "rest"), + ], +) +def test_domains_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc", grpc_helpers), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (DomainsClient, transports.DomainsRestTransport, "rest", None), + ], +) +def test_domains_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_domains_client_client_options_from_dict(): + with mock.patch( + "google.cloud.domains_v1.services.domains.transports.DomainsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DomainsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc", grpc_helpers), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_domains_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "domains.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="domains.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.SearchDomainsRequest, + dict, + ], +) +def test_search_domains(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.SearchDomainsResponse() + response = client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.SearchDomainsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.SearchDomainsResponse) + + +def test_search_domains_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + client.search_domains() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.SearchDomainsRequest() + + +@pytest.mark.asyncio +async def test_search_domains_async( + transport: str = "grpc_asyncio", request_type=domains.SearchDomainsRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.SearchDomainsResponse() + ) + response = await client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.SearchDomainsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.SearchDomainsResponse) + + +@pytest.mark.asyncio +async def test_search_domains_async_from_dict(): + await test_search_domains_async(request_type=dict) + + +def test_search_domains_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.SearchDomainsRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + call.return_value = domains.SearchDomainsResponse() + client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_domains_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.SearchDomainsRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.SearchDomainsResponse() + ) + await client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +def test_search_domains_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.SearchDomainsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_domains( + location="location_value", + query="query_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + + +def test_search_domains_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
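+    # (The flattened location/query arguments are a convenience layer over
+    # SearchDomainsRequest; mixing the two styles is rejected with a
+    # ValueError.)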
+ with pytest.raises(ValueError): + client.search_domains( + domains.SearchDomainsRequest(), + location="location_value", + query="query_value", + ) + + +@pytest.mark.asyncio +async def test_search_domains_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.SearchDomainsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.SearchDomainsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_domains( + location="location_value", + query="query_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_search_domains_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.search_domains( + domains.SearchDomainsRequest(), + location="location_value", + query="query_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RetrieveRegisterParametersRequest, + dict, + ], +) +def test_retrieve_register_parameters(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.RetrieveRegisterParametersResponse() + response = client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveRegisterParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveRegisterParametersResponse) + + +def test_retrieve_register_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + client.retrieve_register_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveRegisterParametersRequest() + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_async( + transport: str = "grpc_asyncio", + request_type=domains.RetrieveRegisterParametersRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveRegisterParametersResponse() + ) + response = await client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveRegisterParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveRegisterParametersResponse) + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_async_from_dict(): + await test_retrieve_register_parameters_async(request_type=dict) + + +def test_retrieve_register_parameters_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveRegisterParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + call.return_value = domains.RetrieveRegisterParametersResponse() + client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveRegisterParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveRegisterParametersResponse() + ) + await client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
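+    # The GAPIC layer derives "x-goog-request-params" from the request's URI
+    # fields and attaches it as gRPC metadata; the mock records that metadata
+    # in the call's keyword arguments, so the assertion below can check for
+    # the ("x-goog-request-params", "location=location_value") pair directly.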
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "location=location_value",
+    ) in kw["metadata"]
+
+
+def test_retrieve_register_parameters_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_register_parameters), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.RetrieveRegisterParametersResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.retrieve_register_parameters(
+            location="location_value",
+            domain_name="domain_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].location
+        mock_val = "location_value"
+        assert arg == mock_val
+        arg = args[0].domain_name
+        mock_val = "domain_name_value"
+        assert arg == mock_val
+
+
+def test_retrieve_register_parameters_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.retrieve_register_parameters(
+            domains.RetrieveRegisterParametersRequest(),
+            location="location_value",
+            domain_name="domain_name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_retrieve_register_parameters_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_register_parameters), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.RetrieveRegisterParametersResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.retrieve_register_parameters(
+            location="location_value",
+            domain_name="domain_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].location
+        mock_val = "location_value"
+        assert arg == mock_val
+        arg = args[0].domain_name
+        mock_val = "domain_name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_retrieve_register_parameters_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.retrieve_register_parameters( + domains.RetrieveRegisterParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RegisterDomainRequest, + dict, + ], +) +def test_register_domain(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RegisterDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_register_domain_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + client.register_domain() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RegisterDomainRequest() + + +@pytest.mark.asyncio +async def test_register_domain_async( + transport: str = "grpc_asyncio", request_type=domains.RegisterDomainRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RegisterDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_register_domain_async_from_dict(): + await test_register_domain_async(request_type=dict) + + +def test_register_domain_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RegisterDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
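+    # (RegisterDomain is a long-running operation: the raw stub returns an
+    # operations_pb2.Operation, which the client wraps in an operation
+    # future. A bare Operation return value is enough for this test, since
+    # only request routing is asserted.)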
+ with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_register_domain_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RegisterDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_register_domain_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.register_domain( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].yearly_price + mock_val = money_pb2.Money(currency_code="currency_code_value") + assert arg == mock_val + + +def test_register_domain_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.register_domain( + domains.RegisterDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + +@pytest.mark.asyncio +async def test_register_domain_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.register_domain(
+            parent="parent_value",
+            registration=domains.Registration(name="name_value"),
+            yearly_price=money_pb2.Money(currency_code="currency_code_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].registration
+        mock_val = domains.Registration(name="name_value")
+        assert arg == mock_val
+        arg = args[0].yearly_price
+        mock_val = money_pb2.Money(currency_code="currency_code_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_register_domain_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.register_domain(
+            domains.RegisterDomainRequest(),
+            parent="parent_value",
+            registration=domains.Registration(name="name_value"),
+            yearly_price=money_pb2.Money(currency_code="currency_code_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.RetrieveTransferParametersRequest,
+        dict,
+    ],
+)
+def test_retrieve_transfer_parameters(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_transfer_parameters), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.RetrieveTransferParametersResponse()
+        response = client.retrieve_transfer_parameters(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == domains.RetrieveTransferParametersRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, domains.RetrieveTransferParametersResponse)
+
+
+def test_retrieve_transfer_parameters_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
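+    # (The *_empty_call tests depend on the client building a default request
+    # when invoked with no arguments; the stub should then receive a plain
+    # domains.RetrieveTransferParametersRequest(), as asserted below.)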
+ with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + client.retrieve_transfer_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveTransferParametersRequest() + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_async( + transport: str = "grpc_asyncio", + request_type=domains.RetrieveTransferParametersRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveTransferParametersResponse() + ) + response = await client.retrieve_transfer_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveTransferParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveTransferParametersResponse) + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_async_from_dict(): + await test_retrieve_transfer_parameters_async(request_type=dict) + + +def test_retrieve_transfer_parameters_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveTransferParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + call.return_value = domains.RetrieveTransferParametersResponse() + client.retrieve_transfer_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveTransferParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveTransferParametersResponse() + ) + await client.retrieve_transfer_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "location=location_value",
+    ) in kw["metadata"]
+
+
+def test_retrieve_transfer_parameters_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_transfer_parameters), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.RetrieveTransferParametersResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.retrieve_transfer_parameters(
+            location="location_value",
+            domain_name="domain_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].location
+        mock_val = "location_value"
+        assert arg == mock_val
+        arg = args[0].domain_name
+        mock_val = "domain_name_value"
+        assert arg == mock_val
+
+
+def test_retrieve_transfer_parameters_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.retrieve_transfer_parameters(
+            domains.RetrieveTransferParametersRequest(),
+            location="location_value",
+            domain_name="domain_name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_retrieve_transfer_parameters_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_transfer_parameters), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.RetrieveTransferParametersResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.retrieve_transfer_parameters(
+            location="location_value",
+            domain_name="domain_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].location
+        mock_val = "location_value"
+        assert arg == mock_val
+        arg = args[0].domain_name
+        mock_val = "domain_name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_retrieve_transfer_parameters_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.retrieve_transfer_parameters( + domains.RetrieveTransferParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.TransferDomainRequest, + dict, + ], +) +def test_transfer_domain(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.TransferDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_transfer_domain_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + client.transfer_domain() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.TransferDomainRequest() + + +@pytest.mark.asyncio +async def test_transfer_domain_async( + transport: str = "grpc_asyncio", request_type=domains.TransferDomainRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.TransferDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_transfer_domain_async_from_dict(): + await test_transfer_domain_async(request_type=dict) + + +def test_transfer_domain_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.TransferDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_transfer_domain_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.TransferDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_transfer_domain_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.transfer_domain( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].yearly_price + mock_val = money_pb2.Money(currency_code="currency_code_value") + assert arg == mock_val + arg = args[0].authorization_code + mock_val = domains.AuthorizationCode(code="code_value") + assert arg == mock_val + + +def test_transfer_domain_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.transfer_domain(
+            domains.TransferDomainRequest(),
+            parent="parent_value",
+            registration=domains.Registration(name="name_value"),
+            yearly_price=money_pb2.Money(currency_code="currency_code_value"),
+            authorization_code=domains.AuthorizationCode(code="code_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_transfer_domain_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.transfer_domain(
+            parent="parent_value",
+            registration=domains.Registration(name="name_value"),
+            yearly_price=money_pb2.Money(currency_code="currency_code_value"),
+            authorization_code=domains.AuthorizationCode(code="code_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].registration
+        mock_val = domains.Registration(name="name_value")
+        assert arg == mock_val
+        arg = args[0].yearly_price
+        mock_val = money_pb2.Money(currency_code="currency_code_value")
+        assert arg == mock_val
+        arg = args[0].authorization_code
+        mock_val = domains.AuthorizationCode(code="code_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_transfer_domain_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.transfer_domain(
+            domains.TransferDomainRequest(),
+            parent="parent_value",
+            registration=domains.Registration(name="name_value"),
+            yearly_price=money_pb2.Money(currency_code="currency_code_value"),
+            authorization_code=domains.AuthorizationCode(code="code_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.ListRegistrationsRequest,
+        dict,
+    ],
+)
+def test_list_registrations(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_registrations), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.ListRegistrationsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_registrations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == domains.ListRegistrationsRequest()
+
+    # Establish that the response is the type that we expect.
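+    # (For list RPCs the client wraps the raw response in a pager; attribute
+    # access such as `response.next_page_token` below is delegated by the
+    # pager to the underlying ListRegistrationsResponse.)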
+ assert isinstance(response, pagers.ListRegistrationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_registrations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + client.list_registrations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ListRegistrationsRequest() + + +@pytest.mark.asyncio +async def test_list_registrations_async( + transport: str = "grpc_asyncio", request_type=domains.ListRegistrationsRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.ListRegistrationsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_registrations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ListRegistrationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRegistrationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_registrations_async_from_dict(): + await test_list_registrations_async(request_type=dict) + + +def test_list_registrations_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ListRegistrationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + call.return_value = domains.ListRegistrationsResponse() + client.list_registrations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_registrations_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ListRegistrationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
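+    # (grpc_helpers_async.FakeUnaryUnaryCall wraps the supplied response in an
+    # awaitable call object, so the async client's `await` resolves to the
+    # response even though the patched stub itself is a plain synchronous
+    # mock.)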
+    with mock.patch.object(
+        type(client.transport.list_registrations), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.ListRegistrationsResponse()
+        )
+        await client.list_registrations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_registrations_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_registrations), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.ListRegistrationsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_registrations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_registrations_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_registrations(
+            domains.ListRegistrationsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_registrations_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_registrations), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.ListRegistrationsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_registrations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_registrations_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_registrations(
+            domains.ListRegistrationsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_registrations_pager(transport_name: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_registrations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
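+        # Each page fetch consumes the next item from `side_effect`; the
+        # trailing RuntimeError makes the test fail loudly if the pager were
+        # ever to request a page beyond the final, token-less response.
+        # Roughly, the pages below behave as:
+        #
+        #     page 1 -> next_page_token="abc" (3 registrations)
+        #     page 2 -> next_page_token="def" (0 registrations)
+        #     page 3 -> next_page_token="ghi" (1 registration)
+        #     page 4 -> next_page_token=""    (2 registrations; iteration ends)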
+        call.side_effect = (
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                    domains.Registration(),
+                    domains.Registration(),
+                ],
+                next_page_token="abc",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[],
+                next_page_token="def",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                ],
+                next_page_token="ghi",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                    domains.Registration(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_registrations(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, domains.Registration) for i in results)
+
+
+def test_list_registrations_pages(transport_name: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_registrations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                    domains.Registration(),
+                    domains.Registration(),
+                ],
+                next_page_token="abc",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[],
+                next_page_token="def",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                ],
+                next_page_token="ghi",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                    domains.Registration(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_registrations(request={}).pages)
+    for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+        assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_registrations_async_pager():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_registrations),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                    domains.Registration(),
+                    domains.Registration(),
+                ],
+                next_page_token="abc",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[],
+                next_page_token="def",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                ],
+                next_page_token="ghi",
+            ),
+            domains.ListRegistrationsResponse(
+                registrations=[
+                    domains.Registration(),
+                    domains.Registration(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_registrations(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, domains.Registration) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_registrations_async_pages():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
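+    # (The async pager tests patch the stub with mock.AsyncMock, so awaiting
+    # the call yields each page directly; `async for` then drives the pager
+    # across page boundaries.)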
+ with mock.patch.object( + type(client.transport.list_registrations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + domains.Registration(), + ], + next_page_token="abc", + ), + domains.ListRegistrationsResponse( + registrations=[], + next_page_token="def", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + ], + next_page_token="ghi", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_registrations(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + domains.GetRegistrationRequest, + dict, + ], +) +def test_get_registration(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.Registration( + name="name_value", + domain_name="domain_name_value", + state=domains.Registration.State.REGISTRATION_PENDING, + issues=[domains.Registration.Issue.CONTACT_SUPPORT], + supported_privacy=[domains.ContactPrivacy.PUBLIC_CONTACT_DATA], + ) + response = client.get_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.GetRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.Registration) + assert response.name == "name_value" + assert response.domain_name == "domain_name_value" + assert response.state == domains.Registration.State.REGISTRATION_PENDING + assert response.issues == [domains.Registration.Issue.CONTACT_SUPPORT] + assert response.supported_privacy == [domains.ContactPrivacy.PUBLIC_CONTACT_DATA] + + +def test_get_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + client.get_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.GetRegistrationRequest() + + +@pytest.mark.asyncio +async def test_get_registration_async( + transport: str = "grpc_asyncio", request_type=domains.GetRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.Registration( + name="name_value", + domain_name="domain_name_value", + state=domains.Registration.State.REGISTRATION_PENDING, + issues=[domains.Registration.Issue.CONTACT_SUPPORT], + supported_privacy=[domains.ContactPrivacy.PUBLIC_CONTACT_DATA], + ) + ) + response = await client.get_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.GetRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.Registration) + assert response.name == "name_value" + assert response.domain_name == "domain_name_value" + assert response.state == domains.Registration.State.REGISTRATION_PENDING + assert response.issues == [domains.Registration.Issue.CONTACT_SUPPORT] + assert response.supported_privacy == [domains.ContactPrivacy.PUBLIC_CONTACT_DATA] + + +@pytest.mark.asyncio +async def test_get_registration_async_from_dict(): + await test_get_registration_async(request_type=dict) + + +def test_get_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.GetRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + call.return_value = domains.Registration() + client.get_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_registration_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.GetRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_registration), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.Registration()
+        )
+        await client.get_registration(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_registration_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_registration), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.Registration()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_registration(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_registration_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_registration(
+            domains.GetRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_registration_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_registration), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.Registration()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_registration(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_registration_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_registration(
+            domains.GetRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.UpdateRegistrationRequest,
+        dict,
+    ],
+)
+def test_update_registration(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.UpdateRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + client.update_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.UpdateRegistrationRequest() + + +@pytest.mark.asyncio +async def test_update_registration_async( + transport: str = "grpc_asyncio", request_type=domains.UpdateRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.UpdateRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_registration_async_from_dict(): + await test_update_registration_async(request_type=dict) + + +def test_update_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.UpdateRegistrationRequest() + + request.registration.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
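+    # (Note the dotted path: because the routing field lives on a nested
+    # message, the header value is flattened to
+    # "registration.name=name_value" rather than a top-level field name.)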
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration.name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_registration_field_headers_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = domains.UpdateRegistrationRequest()
+
+    request.registration.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_registration), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.update_registration(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_registration_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_registration(
+            registration=domains.Registration(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = domains.Registration(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_registration_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_registration(
+            domains.UpdateRegistrationRequest(),
+            registration=domains.Registration(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_registration_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.update_registration( + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_registration_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_registration( + domains.UpdateRegistrationRequest(), + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureManagementSettingsRequest, + dict, + ], +) +def test_configure_management_settings(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureManagementSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_configure_management_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + client.configure_management_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureManagementSettingsRequest() + + +@pytest.mark.asyncio +async def test_configure_management_settings_async( + transport: str = "grpc_asyncio", + request_type=domains.ConfigureManagementSettingsRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
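+        # The async surface awaits the stub's return value, so a plain message
+        # is not enough: grpc_helpers_async.FakeUnaryUnaryCall wraps it in an
+        # awaitable object that mimics a unary-unary gRPC call.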
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureManagementSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_configure_management_settings_async_from_dict(): + await test_configure_management_settings_async(request_type=dict) + + +def test_configure_management_settings_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureManagementSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_configure_management_settings_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureManagementSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +def test_configure_management_settings_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.configure_management_settings(
+            registration="registration_value",
+            management_settings=domains.ManagementSettings(
+                renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+        arg = args[0].management_settings
+        mock_val = domains.ManagementSettings(
+            renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_configure_management_settings_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.configure_management_settings(
+            domains.ConfigureManagementSettingsRequest(),
+            registration="registration_value",
+            management_settings=domains.ManagementSettings(
+                renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_configure_management_settings_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.configure_management_settings), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.configure_management_settings(
+            registration="registration_value",
+            management_settings=domains.ManagementSettings(
+                renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+        arg = args[0].management_settings
+        mock_val = domains.ManagementSettings(
+            renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_configure_management_settings_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.configure_management_settings( + domains.ConfigureManagementSettingsRequest(), + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureDnsSettingsRequest, + dict, + ], +) +def test_configure_dns_settings(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureDnsSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_configure_dns_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + client.configure_dns_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureDnsSettingsRequest() + + +@pytest.mark.asyncio +async def test_configure_dns_settings_async( + transport: str = "grpc_asyncio", request_type=domains.ConfigureDnsSettingsRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureDnsSettingsRequest() + + # Establish that the response is the type that we expect. 
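+    # configure_dns_settings is a long-running operation, so the client wraps
+    # the raw operations_pb2.Operation in an operation future; a real caller
+    # would typically wait for the terminal response via response.result().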
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_configure_dns_settings_async_from_dict(): + await test_configure_dns_settings_async(request_type=dict) + + +def test_configure_dns_settings_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureDnsSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_configure_dns_settings_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureDnsSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +def test_configure_dns_settings_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.configure_dns_settings( + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+        arg = args[0].dns_settings
+        mock_val = domains.DnsSettings(
+            custom_dns=domains.DnsSettings.CustomDns(
+                name_servers=["name_servers_value"]
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_configure_dns_settings_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.configure_dns_settings(
+            domains.ConfigureDnsSettingsRequest(),
+            registration="registration_value",
+            dns_settings=domains.DnsSettings(
+                custom_dns=domains.DnsSettings.CustomDns(
+                    name_servers=["name_servers_value"]
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_configure_dns_settings_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.configure_dns_settings), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.configure_dns_settings(
+            registration="registration_value",
+            dns_settings=domains.DnsSettings(
+                custom_dns=domains.DnsSettings.CustomDns(
+                    name_servers=["name_servers_value"]
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+        arg = args[0].dns_settings
+        mock_val = domains.DnsSettings(
+            custom_dns=domains.DnsSettings.CustomDns(
+                name_servers=["name_servers_value"]
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_configure_dns_settings_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.configure_dns_settings( + domains.ConfigureDnsSettingsRequest(), + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureContactSettingsRequest, + dict, + ], +) +def test_configure_contact_settings(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureContactSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_configure_contact_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + client.configure_contact_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureContactSettingsRequest() + + +@pytest.mark.asyncio +async def test_configure_contact_settings_async( + transport: str = "grpc_asyncio", + request_type=domains.ConfigureContactSettingsRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureContactSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_configure_contact_settings_async_from_dict(): + await test_configure_contact_settings_async(request_type=dict) + + +def test_configure_contact_settings_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureContactSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_configure_contact_settings_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureContactSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +def test_configure_contact_settings_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.configure_contact_settings( + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+        arg = args[0].contact_settings
+        mock_val = domains.ContactSettings(
+            privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_configure_contact_settings_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.configure_contact_settings(
+            domains.ConfigureContactSettingsRequest(),
+            registration="registration_value",
+            contact_settings=domains.ContactSettings(
+                privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_configure_contact_settings_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.configure_contact_settings), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.configure_contact_settings(
+            registration="registration_value",
+            contact_settings=domains.ContactSettings(
+                privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+        arg = args[0].contact_settings
+        mock_val = domains.ContactSettings(
+            privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA
+        )
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_configure_contact_settings_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.configure_contact_settings(
+            domains.ConfigureContactSettingsRequest(),
+            registration="registration_value",
+            contact_settings=domains.ContactSettings(
+                privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.ExportRegistrationRequest,
+        dict,
+    ],
+)
+def test_export_registration(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
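+    # Proto3 gives every scalar field a default value, so an empty
+    # ExportRegistrationRequest is structurally valid at the runtime level,
+    # even though the live service would likely reject it for lacking a
+    # resource name.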
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ExportRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + client.export_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ExportRegistrationRequest() + + +@pytest.mark.asyncio +async def test_export_registration_async( + transport: str = "grpc_asyncio", request_type=domains.ExportRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ExportRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_registration_async_from_dict(): + await test_export_registration_async(request_type=dict) + + +def test_export_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ExportRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_export_registration_field_headers_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = domains.ExportRegistrationRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.export_registration), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.export_registration(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_export_registration_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.export_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.export_registration(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_export_registration_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.export_registration(
+            domains.ExportRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_export_registration_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.export_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.export_registration(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_export_registration_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.export_registration( + domains.ExportRegistrationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.DeleteRegistrationRequest, + dict, + ], +) +def test_delete_registration(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.DeleteRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + client.delete_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.DeleteRegistrationRequest() + + +@pytest.mark.asyncio +async def test_delete_registration_async( + transport: str = "grpc_asyncio", request_type=domains.DeleteRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.DeleteRegistrationRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_registration_async_from_dict(): + await test_delete_registration_async(request_type=dict) + + +def test_delete_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.DeleteRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_registration_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.DeleteRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_registration_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_registration( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_registration_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
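+    # A positional request object combined with flattened keyword arguments
+    # would be ambiguous (which value wins?), so the generated client raises
+    # ValueError instead of guessing.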
+    with pytest.raises(ValueError):
+        client.delete_registration(
+            domains.DeleteRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_registration_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_registration(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_registration_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_registration(
+            domains.DeleteRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.RetrieveAuthorizationCodeRequest,
+        dict,
+    ],
+)
+def test_retrieve_authorization_code(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.AuthorizationCode(
+            code="code_value",
+        )
+        response = client.retrieve_authorization_code(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == domains.RetrieveAuthorizationCodeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, domains.AuthorizationCode)
+    assert response.code == "code_value"
+
+
+def test_retrieve_authorization_code_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
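+    # Patching __call__ on type(...) swaps out the underlying gRPC stub
+    # callable itself, so the client-side plumbing (request construction,
+    # default wrapping, metadata) still runs while the network call is faked.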
+ with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + client.retrieve_authorization_code() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveAuthorizationCodeRequest() + + +@pytest.mark.asyncio +async def test_retrieve_authorization_code_async( + transport: str = "grpc_asyncio", + request_type=domains.RetrieveAuthorizationCodeRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.AuthorizationCode( + code="code_value", + ) + ) + response = await client.retrieve_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveAuthorizationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +@pytest.mark.asyncio +async def test_retrieve_authorization_code_async_from_dict(): + await test_retrieve_authorization_code_async(request_type=dict) + + +def test_retrieve_authorization_code_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveAuthorizationCodeRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + call.return_value = domains.AuthorizationCode() + client.retrieve_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_authorization_code_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveAuthorizationCodeRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.AuthorizationCode() + ) + await client.retrieve_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration=registration_value",
+    ) in kw["metadata"]
+
+
+def test_retrieve_authorization_code_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.AuthorizationCode()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.retrieve_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+
+
+def test_retrieve_authorization_code_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.retrieve_authorization_code(
+            domains.RetrieveAuthorizationCodeRequest(),
+            registration="registration_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_retrieve_authorization_code_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.AuthorizationCode()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.retrieve_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_retrieve_authorization_code_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.retrieve_authorization_code(
+            domains.RetrieveAuthorizationCodeRequest(),
+            registration="registration_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.ResetAuthorizationCodeRequest,
+        dict,
+    ],
+)
+def test_reset_authorization_code(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
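+        # Unlike the registration mutations above, reset_authorization_code is
+        # a plain unary method: the stub returns a domains.AuthorizationCode
+        # message directly rather than a long-running Operation.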
+ call.return_value = domains.AuthorizationCode( + code="code_value", + ) + response = client.reset_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ResetAuthorizationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +def test_reset_authorization_code_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_authorization_code), "__call__" + ) as call: + client.reset_authorization_code() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ResetAuthorizationCodeRequest() + + +@pytest.mark.asyncio +async def test_reset_authorization_code_async( + transport: str = "grpc_asyncio", request_type=domains.ResetAuthorizationCodeRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_authorization_code), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.AuthorizationCode( + code="code_value", + ) + ) + response = await client.reset_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ResetAuthorizationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +@pytest.mark.asyncio +async def test_reset_authorization_code_async_from_dict(): + await test_reset_authorization_code_async(request_type=dict) + + +def test_reset_authorization_code_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ResetAuthorizationCodeRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_authorization_code), "__call__" + ) as call: + call.return_value = domains.AuthorizationCode() + client.reset_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration=registration_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_reset_authorization_code_field_headers_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = domains.ResetAuthorizationCodeRequest()
+
+    request.registration = "registration_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.AuthorizationCode()
+        )
+        await client.reset_authorization_code(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration=registration_value",
+    ) in kw["metadata"]
+
+
+def test_reset_authorization_code_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.AuthorizationCode()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.reset_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+
+
+def test_reset_authorization_code_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.reset_authorization_code(
+            domains.ResetAuthorizationCodeRequest(),
+            registration="registration_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_reset_authorization_code_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.AuthorizationCode()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.reset_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_reset_authorization_code_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reset_authorization_code( + domains.ResetAuthorizationCodeRequest(), + registration="registration_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.SearchDomainsRequest, + dict, + ], +) +def test_search_domains_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.SearchDomainsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.SearchDomainsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_domains(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.SearchDomainsResponse) + + +def test_search_domains_rest_required_fields(request_type=domains.SearchDomainsRequest): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["query"] = "" + request_init["location"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_domains._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["query"] = "query_value" + jsonified_request["location"] = "location_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_domains._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
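+    # That is, every still-unset required field must be a query parameter;
+    # for example, unset_fields == {"query"} leaves the set difference empty
+    # and the assertion passes.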
+ assert not set(unset_fields) - set(("query",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.SearchDomainsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.SearchDomainsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_domains(request) + + expected_params = [ + ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_domains_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_domains._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("query",)) + & set( + ( + "query", + "location", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_domains_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_search_domains" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_search_domains" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.SearchDomainsRequest.pb(domains.SearchDomainsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.SearchDomainsResponse.to_json( + domains.SearchDomainsResponse() + ) + + request = domains.SearchDomainsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.SearchDomainsResponse() + + client.search_domains( + request, + 
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_search_domains_rest_bad_request(
+    transport: str = "rest", request_type=domains.SearchDomainsRequest
+):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"location": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.search_domains(request)
+
+
+def test_search_domains_rest_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = domains.SearchDomainsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"location": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            location="location_value",
+            query="query_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = domains.SearchDomainsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.search_domains(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{location=projects/*/locations/*}/registrations:searchDomains"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_search_domains_rest_flattened_error(transport: str = "rest"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.search_domains(
+            domains.SearchDomainsRequest(),
+            location="location_value",
+            query="query_value",
+        )
+
+
+def test_search_domains_rest_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.RetrieveRegisterParametersRequest,
+        dict,
+    ],
+)
+def test_retrieve_register_parameters_rest(request_type):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"location": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
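+        # The REST tests fake a wire response by serializing the proto
+        # payload to JSON and assigning it to ``Response._content``, the
+        # same shape the transport would read off a real HTTP response.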
+ return_value = domains.RetrieveRegisterParametersResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveRegisterParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_register_parameters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveRegisterParametersResponse) + + +def test_retrieve_register_parameters_rest_required_fields( + request_type=domains.RetrieveRegisterParametersRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["domain_name"] = "" + request_init["location"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "domainName" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_register_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == request_init["domain_name"] + + jsonified_request["domainName"] = "domain_name_value" + jsonified_request["location"] = "location_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_register_parameters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("domain_name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == "domain_name_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveRegisterParametersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
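+            # The canned transcode result below pins the URI and method so
+            # the test can assert on the query parameters alone.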
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.RetrieveRegisterParametersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retrieve_register_parameters(request) + + expected_params = [ + ( + "domainName", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retrieve_register_parameters_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.retrieve_register_parameters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("domainName",)) + & set( + ( + "domainName", + "location", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retrieve_register_parameters_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_retrieve_register_parameters" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_retrieve_register_parameters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.RetrieveRegisterParametersRequest.pb( + domains.RetrieveRegisterParametersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.RetrieveRegisterParametersResponse.to_json( + domains.RetrieveRegisterParametersResponse() + ) + + request = domains.RetrieveRegisterParametersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.RetrieveRegisterParametersResponse() + + client.retrieve_register_parameters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retrieve_register_parameters_rest_bad_request( + transport: str = "rest", request_type=domains.RetrieveRegisterParametersRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
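+    # A mocked 400 status is translated by the transport into
+    # ``core_exceptions.BadRequest``, which ``pytest.raises`` expects.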
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_register_parameters(request) + + +def test_retrieve_register_parameters_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveRegisterParametersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + location="location_value", + domain_name="domain_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveRegisterParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retrieve_register_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{location=projects/*/locations/*}/registrations:retrieveRegisterParameters" + % client.transport._host, + args[1], + ) + + +def test_retrieve_register_parameters_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_register_parameters( + domains.RetrieveRegisterParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +def test_retrieve_register_parameters_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RegisterDomainRequest, + dict, + ], +) +def test_register_domain_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.register_domain(request) + + # Establish that the response is the type that we expect. 
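+    # register_domain is a long-running operation, so the client wraps the
+    # faked ``operations_pb2.Operation`` payload in an operation future.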
+ assert response.operation.name == "operations/spam" + + +def test_register_domain_rest_required_fields( + request_type=domains.RegisterDomainRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).register_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).register_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
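+            # register_domain is a POST with a body, so the canned transcode
+            # result below carries the request message as the HTTP body too.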
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.register_domain(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_register_domain_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.register_domain._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "registration", + "yearlyPrice", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_register_domain_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_register_domain" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_register_domain" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.RegisterDomainRequest.pb(domains.RegisterDomainRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.RegisterDomainRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.register_domain( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_register_domain_rest_bad_request( + transport: str = "rest", request_type=domains.RegisterDomainRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.register_domain(request) + + +def test_register_domain_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.register_domain(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/registrations:register" + % client.transport._host, + args[1], + ) + + +def test_register_domain_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.register_domain( + domains.RegisterDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + +def test_register_domain_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RetrieveTransferParametersRequest, + dict, + ], +) +def test_retrieve_transfer_parameters_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = domains.RetrieveTransferParametersResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveTransferParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_transfer_parameters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveTransferParametersResponse) + + +def test_retrieve_transfer_parameters_rest_required_fields( + request_type=domains.RetrieveTransferParametersRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["domain_name"] = "" + request_init["location"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "domainName" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_transfer_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == request_init["domain_name"] + + jsonified_request["domainName"] = "domain_name_value" + jsonified_request["location"] = "location_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_transfer_parameters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("domain_name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == "domain_name_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveTransferParametersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.RetrieveTransferParametersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retrieve_transfer_parameters(request) + + expected_params = [ + ( + "domainName", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retrieve_transfer_parameters_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.retrieve_transfer_parameters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("domainName",)) + & set( + ( + "domainName", + "location", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_retrieve_transfer_parameters" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_retrieve_transfer_parameters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.RetrieveTransferParametersRequest.pb( + domains.RetrieveTransferParametersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.RetrieveTransferParametersResponse.to_json( + domains.RetrieveTransferParametersResponse() + ) + + request = domains.RetrieveTransferParametersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.RetrieveTransferParametersResponse() + + client.retrieve_transfer_parameters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retrieve_transfer_parameters_rest_bad_request( + transport: str = "rest", request_type=domains.RetrieveTransferParametersRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_transfer_parameters(request) + + +def test_retrieve_transfer_parameters_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveTransferParametersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + location="location_value", + domain_name="domain_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveTransferParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retrieve_transfer_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{location=projects/*/locations/*}/registrations:retrieveTransferParameters" + % client.transport._host, + args[1], + ) + + +def test_retrieve_transfer_parameters_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_transfer_parameters( + domains.RetrieveTransferParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +def test_retrieve_transfer_parameters_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.TransferDomainRequest, + dict, + ], +) +def test_transfer_domain_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.transfer_domain(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_transfer_domain_rest_required_fields( + request_type=domains.TransferDomainRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.transfer_domain(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_transfer_domain_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.transfer_domain._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "registration", + "yearlyPrice", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_transfer_domain_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_transfer_domain" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_transfer_domain" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.TransferDomainRequest.pb(domains.TransferDomainRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.TransferDomainRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.transfer_domain( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_transfer_domain_rest_bad_request( + transport: str = "rest", request_type=domains.TransferDomainRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.transfer_domain(request) + + +def test_transfer_domain_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.transfer_domain(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/registrations:transfer" + % client.transport._host, + args[1], + ) + + +def test_transfer_domain_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.transfer_domain( + domains.TransferDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + + +def test_transfer_domain_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ListRegistrationsRequest, + dict, + ], +) +def test_list_registrations_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
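+        # A single fake page is enough here; multi-page iteration is
+        # exercised separately in test_list_registrations_rest_pager below.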
+ return_value = domains.ListRegistrationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.ListRegistrationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_registrations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRegistrationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_registrations_rest_required_fields( + request_type=domains.ListRegistrationsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_registrations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_registrations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.ListRegistrationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
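+            # list_registrations is a GET, so the optional paging fields
+            # (filter, pageSize, pageToken) travel as query parameters.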
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.ListRegistrationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_registrations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_registrations_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_registrations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_registrations_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_list_registrations" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_list_registrations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ListRegistrationsRequest.pb( + domains.ListRegistrationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.ListRegistrationsResponse.to_json( + domains.ListRegistrationsResponse() + ) + + request = domains.ListRegistrationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.ListRegistrationsResponse() + + client.list_registrations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_registrations_rest_bad_request( + transport: str = "rest", request_type=domains.ListRegistrationsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_registrations(request) + + +def test_list_registrations_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.ListRegistrationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.ListRegistrationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_registrations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/registrations" + % client.transport._host, + args[1], + ) + + +def test_list_registrations_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_registrations( + domains.ListRegistrationsRequest(), + parent="parent_value", + ) + + +def test_list_registrations_rest_pager(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + domains.Registration(), + ], + next_page_token="abc", + ), + domains.ListRegistrationsResponse( + registrations=[], + next_page_token="def", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + ], + next_page_token="ghi", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(domains.ListRegistrationsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_registrations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, domains.Registration) for i in results) + + pages = list(client.list_registrations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + domains.GetRegistrationRequest, + dict, + ], +) +def test_get_registration_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.Registration( + name="name_value", + domain_name="domain_name_value", + state=domains.Registration.State.REGISTRATION_PENDING, + issues=[domains.Registration.Issue.CONTACT_SUPPORT], + supported_privacy=[domains.ContactPrivacy.PUBLIC_CONTACT_DATA], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.Registration.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_registration(request) + + # Establish that the response is the type that we expect. 
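+    # Enum and repeated enum fields survive the JSON round trip, so they
+    # are asserted alongside the plain string fields.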
+ assert isinstance(response, domains.Registration) + assert response.name == "name_value" + assert response.domain_name == "domain_name_value" + assert response.state == domains.Registration.State.REGISTRATION_PENDING + assert response.issues == [domains.Registration.Issue.CONTACT_SUPPORT] + assert response.supported_privacy == [domains.ContactPrivacy.PUBLIC_CONTACT_DATA] + + +def test_get_registration_rest_required_fields( + request_type=domains.GetRegistrationRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.Registration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
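+            # (The stubbed result below mirrors the dict shape that a real
+            # path_template.transcode call returns: a uri, an HTTP method, and
+            # the remaining request fields routed into query_params.)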
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.Registration.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_registration_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_registration._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_registration_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_get_registration" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_get_registration" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.GetRegistrationRequest.pb(domains.GetRegistrationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.Registration.to_json(domains.Registration()) + + request = domains.GetRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.Registration() + + client.get_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_registration_rest_bad_request( + transport: str = "rest", request_type=domains.GetRegistrationRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_registration(request) + + +def test_get_registration_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
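+    # (This flattened variant passes name= as a keyword argument rather than a
+    # request object; the client is expected to build the GetRegistrationRequest
+    # itself and expand "name" into the URL path validated below.)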
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = domains.Registration()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/registrations/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = domains.Registration.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.get_registration(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/registrations/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_get_registration_rest_flattened_error(transport: str = "rest"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_registration(
+            domains.GetRegistrationRequest(),
+            name="name_value",
+        )
+
+
+def test_get_registration_rest_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.UpdateRegistrationRequest,
+        dict,
+    ],
+)
+def test_update_registration_rest(request_type):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "registration": {
+            "name": "projects/sample1/locations/sample2/registrations/sample3"
+        }
+    }
+    request_init["registration"] = {
+        "name": "projects/sample1/locations/sample2/registrations/sample3",
+        "domain_name": "domain_name_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "expire_time": {},
+        "state": 1,
+        "issues": [1],
+        "labels": {},
+        "management_settings": {"renewal_method": 1, "transfer_lock_state": 1},
+        "dns_settings": {
+            "custom_dns": {
+                "name_servers": ["name_servers_value1", "name_servers_value2"],
+                "ds_records": [
+                    {
+                        "key_tag": 740,
+                        "algorithm": 1,
+                        "digest_type": 1,
+                        "digest": "digest_value",
+                    }
+                ],
+            },
+            "google_domains_dns": {
+                "name_servers": ["name_servers_value1", "name_servers_value2"],
+                "ds_state": 1,
+                "ds_records": {},
+            },
+            "glue_records": [
+                {
+                    "host_name": "host_name_value",
+                    "ipv4_addresses": [
+                        "ipv4_addresses_value1",
+                        "ipv4_addresses_value2",
+                    ],
+                    "ipv6_addresses": [
+                        "ipv6_addresses_value1",
+                        "ipv6_addresses_value2",
+                    ],
+                }
+            ],
+        },
+        "contact_settings": {
+            "privacy": 1,
+            "registrant_contact": {
+                "postal_address": {
+                    "revision": 879,
+                    "region_code": "region_code_value",
+                    "language_code": "language_code_value",
+                    "postal_code": "postal_code_value",
+                    "sorting_code": "sorting_code_value",
+                    "administrative_area": "administrative_area_value",
+                    "locality": "locality_value",
+                    "sublocality": "sublocality_value",
+                    "address_lines": ["address_lines_value1", "address_lines_value2"],
+                    "recipients": ["recipients_value1", "recipients_value2"],
+                    "organization": "organization_value",
+                },
+                "email": "email_value",
+                "phone_number": "phone_number_value",
+                "fax_number": "fax_number_value",
+            },
+            "admin_contact": {},
+            "technical_contact": {},
+        },
+        "pending_contact_settings": {},
+        "supported_privacy": [1],
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.update_registration(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == "operations/spam"
+
+
+def test_update_registration_rest_required_fields(
+    request_type=domains.UpdateRegistrationRequest,
+):
+    transport_class = transports.DomainsRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_registration._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_registration._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("update_mask",))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_registration(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_registration_rest_unset_required_fields():
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_registration._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_registration_rest_interceptors(null_interceptor):
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DomainsRestInterceptor(),
+    )
+    client = DomainsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.DomainsRestInterceptor, "post_update_registration"
+    ) as post, mock.patch.object(
+        transports.DomainsRestInterceptor, "pre_update_registration"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = domains.UpdateRegistrationRequest.pb(
+            domains.UpdateRegistrationRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = domains.UpdateRegistrationRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.update_registration(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_registration_rest_bad_request(
+    transport: str = "rest", request_type=domains.UpdateRegistrationRequest
+):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "registration": {
+            "name": "projects/sample1/locations/sample2/registrations/sample3"
+        }
+    }
+    request_init["registration"] = {
+        "name": "projects/sample1/locations/sample2/registrations/sample3",
+        "domain_name": "domain_name_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "expire_time": {},
+        "state": 1,
+        "issues": [1],
+        "labels": {},
+        "management_settings": {"renewal_method": 1, "transfer_lock_state": 1},
+        "dns_settings": {
+            "custom_dns": {
+                "name_servers": ["name_servers_value1", "name_servers_value2"],
+                "ds_records": [
+                    {
+                        "key_tag": 740,
+                        "algorithm": 1,
+                        "digest_type": 1,
+                        "digest": "digest_value",
+                    }
+                ],
+            },
+            "google_domains_dns": {
+                "name_servers": ["name_servers_value1", "name_servers_value2"],
+                "ds_state": 1,
+                "ds_records": {},
+            },
+            "glue_records": [
+                {
+                    "host_name": "host_name_value",
+                    "ipv4_addresses": [
+                        "ipv4_addresses_value1",
+                        "ipv4_addresses_value2",
+                    ],
+                    "ipv6_addresses": [
+                        "ipv6_addresses_value1",
+                        "ipv6_addresses_value2",
+                    ],
+                }
+            ],
+        },
+        "contact_settings": {
+            "privacy": 1,
+            "registrant_contact": {
+                "postal_address": {
+                    "revision": 879,
+                    "region_code": "region_code_value",
+                    "language_code": "language_code_value",
+                    "postal_code": "postal_code_value",
+                    "sorting_code": "sorting_code_value",
+                    "administrative_area": "administrative_area_value",
+                    "locality": "locality_value",
+                    "sublocality": "sublocality_value",
+                    "address_lines": ["address_lines_value1", "address_lines_value2"],
+                    "recipients": ["recipients_value1", "recipients_value2"],
+                    "organization": "organization_value",
+                },
+                "email": "email_value",
+                "phone_number": "phone_number_value",
+                "fax_number": "fax_number_value",
+            },
+            "admin_contact": {},
+            "technical_contact": {},
+        },
+        "pending_contact_settings": {},
+        "supported_privacy": [1],
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_registration(request)
+
+
+def test_update_registration_rest_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "registration": {
+                "name": "projects/sample1/locations/sample2/registrations/sample3"
+            }
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            registration=domains.Registration(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_registration(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{registration.name=projects/*/locations/*/registrations/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_registration_rest_flattened_error(transport: str = "rest"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.update_registration( + domains.UpdateRegistrationRequest(), + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_registration_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureManagementSettingsRequest, + dict, + ], +) +def test_configure_management_settings_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.configure_management_settings(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_configure_management_settings_rest_required_fields( + request_type=domains.ConfigureManagementSettingsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_management_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_management_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
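+        # (The real transcode raises ValueError when a request does not match
+        # any http rule, so it is stubbed out for these placeholder values.)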
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.configure_management_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_configure_management_settings_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.configure_management_settings._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "registration", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_configure_management_settings_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_management_settings" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_configure_management_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ConfigureManagementSettingsRequest.pb( + domains.ConfigureManagementSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ConfigureManagementSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.configure_management_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_configure_management_settings_rest_bad_request( + transport: str = "rest", request_type=domains.ConfigureManagementSettingsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
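+    # (The mocked 400 response is mapped by the REST transport to
+    # core_exceptions.BadRequest, which pytest.raises captures below.)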
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.configure_management_settings(request) + + +def test_configure_management_settings_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.configure_management_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{registration=projects/*/locations/*/registrations/*}:configureManagementSettings" + % client.transport._host, + args[1], + ) + + +def test_configure_management_settings_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_management_settings( + domains.ConfigureManagementSettingsRequest(), + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_configure_management_settings_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureDnsSettingsRequest, + dict, + ], +) +def test_configure_dns_settings_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
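+        # (ConfigureDnsSettings is a long-running method, so the faked payload
+        # is a google.longrunning Operation rather than a Registration message.)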
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.configure_dns_settings(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_configure_dns_settings_rest_required_fields( + request_type=domains.ConfigureDnsSettingsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_dns_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_dns_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.configure_dns_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_configure_dns_settings_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.configure_dns_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "registration", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_configure_dns_settings_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_dns_settings" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_configure_dns_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ConfigureDnsSettingsRequest.pb( + domains.ConfigureDnsSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ConfigureDnsSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.configure_dns_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_configure_dns_settings_rest_bad_request( + transport: str = "rest", request_type=domains.ConfigureDnsSettingsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.configure_dns_settings(request) + + +def test_configure_dns_settings_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.configure_dns_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{registration=projects/*/locations/*/registrations/*}:configureDnsSettings" + % client.transport._host, + args[1], + ) + + +def test_configure_dns_settings_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_dns_settings( + domains.ConfigureDnsSettingsRequest(), + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_configure_dns_settings_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureContactSettingsRequest, + dict, + ], +) +def test_configure_contact_settings_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.configure_contact_settings(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_configure_contact_settings_rest_required_fields( + request_type=domains.ConfigureContactSettingsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_contact_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_contact_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.configure_contact_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_configure_contact_settings_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.configure_contact_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "registration", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_configure_contact_settings_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_contact_settings" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_configure_contact_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ConfigureContactSettingsRequest.pb( + domains.ConfigureContactSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ConfigureContactSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.configure_contact_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_configure_contact_settings_rest_bad_request( + transport: str = "rest", request_type=domains.ConfigureContactSettingsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.configure_contact_settings(request) + + +def test_configure_contact_settings_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.configure_contact_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{registration=projects/*/locations/*/registrations/*}:configureContactSettings" + % client.transport._host, + args[1], + ) + + +def test_configure_contact_settings_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_contact_settings( + domains.ConfigureContactSettingsRequest(), + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_configure_contact_settings_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ExportRegistrationRequest, + dict, + ], +) +def test_export_registration_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_registration(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_registration_rest_required_fields( + request_type=domains.ExportRegistrationRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_registration_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_registration._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_registration_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_export_registration" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_export_registration" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ExportRegistrationRequest.pb( + domains.ExportRegistrationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ExportRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_registration_rest_bad_request( + transport: str = "rest", request_type=domains.ExportRegistrationRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_registration(request) + + +def test_export_registration_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.export_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/registrations/*}:export" + % client.transport._host, + args[1], + ) + + +def test_export_registration_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_registration( + domains.ExportRegistrationRequest(), + name="name_value", + ) + + +def test_export_registration_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.DeleteRegistrationRequest, + dict, + ], +) +def test_delete_registration_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_registration(request) + + # Establish that the response is the type that we expect. 
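+    # (delete_registration returns an operation future; response.operation is
+    # the underlying longrunning Operation proto faked above.)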
+ assert response.operation.name == "operations/spam" + + +def test_delete_registration_rest_required_fields( + request_type=domains.DeleteRegistrationRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_registration_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_registration._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_registration_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_delete_registration" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_delete_registration" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.DeleteRegistrationRequest.pb( + domains.DeleteRegistrationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.DeleteRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_registration_rest_bad_request( + transport: str = "rest", request_type=domains.DeleteRegistrationRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
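
# Aside: the interceptor test above follows a generic pre/post hook shape.
# The sketch below is an invented, simplified model of that flow (the names
# Interceptor and call are not part of the generated client); it shows why
# the test can assert that both hooks fire exactly once around one request:

from unittest import mock


class Interceptor:
    def pre(self, request, metadata):
        return request, metadata  # may rewrite the outgoing request

    def post(self, response):
        return response  # may rewrite the incoming response


def call(interceptor, request, metadata, send):
    request, metadata = interceptor.pre(request, metadata)
    return interceptor.post(send(request, metadata))


spy = mock.Mock(wraps=Interceptor())
result = call(spy, {"name": "n"}, [("key", "val")], lambda r, m: "ok")
spy.pre.assert_called_once()
spy.post.assert_called_once()
assert result == "ok"
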
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_registration(request) + + +def test_delete_registration_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/registrations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_registration_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_registration( + domains.DeleteRegistrationRequest(), + name="name_value", + ) + + +def test_delete_registration_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RetrieveAuthorizationCodeRequest, + dict, + ], +) +def test_retrieve_authorization_code_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode( + code="code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_authorization_code(request) + + # Establish that the response is the type that we expect. 
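
# Aside: the bad-request test above depends on api_core translating an HTTP
# response into a typed exception. A hedged standalone sketch of that
# mapping, assuming the from_http_response behavior of current
# google-api-core releases (a 400 status becomes BadRequest):

from google.api_core import exceptions as core_exceptions
from requests import Request, Response

response = Response()
response.status_code = 400
response.request = Request()  # from_http_response reads request.method/url
response._content = b'{"error": {"message": "bad argument"}}'

exc = core_exceptions.from_http_response(response)
assert isinstance(exc, core_exceptions.BadRequest)
assert exc.code == 400
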
+ assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +def test_retrieve_authorization_code_rest_required_fields( + request_type=domains.RetrieveAuthorizationCodeRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retrieve_authorization_code(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retrieve_authorization_code_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.retrieve_authorization_code._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("registration",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retrieve_authorization_code_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_retrieve_authorization_code" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_retrieve_authorization_code" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.RetrieveAuthorizationCodeRequest.pb( + domains.RetrieveAuthorizationCodeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.AuthorizationCode.to_json( + domains.AuthorizationCode() + ) + + request = domains.RetrieveAuthorizationCodeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.AuthorizationCode() + + client.retrieve_authorization_code( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retrieve_authorization_code_rest_bad_request( + transport: str = "rest", request_type=domains.RetrieveAuthorizationCodeRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
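
# Aside: the "$alt" assertion above uses a small mock idiom worth isolating:
# call_args exposes the positional and keyword arguments of the last call,
# which is how these tests pin down the query parameters actually sent.
# All names below are invented for the sketch:

from unittest import mock

session = mock.Mock()
session.request(
    "GET", "https://example.com", params=[("$alt", "json;enum-encoding=int")]
)

args, kwargs = session.request.call_args
assert args == ("GET", "https://example.com")
assert kwargs["params"] == [("$alt", "json;enum-encoding=int")]
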
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_authorization_code(request) + + +def test_retrieve_authorization_code_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retrieve_authorization_code(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{registration=projects/*/locations/*/registrations/*}:retrieveAuthorizationCode" + % client.transport._host, + args[1], + ) + + +def test_retrieve_authorization_code_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_authorization_code( + domains.RetrieveAuthorizationCodeRequest(), + registration="registration_value", + ) + + +def test_retrieve_authorization_code_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ResetAuthorizationCodeRequest, + dict, + ], +) +def test_reset_authorization_code_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
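
# Aside: the flattened-argument tests in this file all enforce one rule: a
# call may pass a request object or individual flattened fields, never both.
# A minimal sketch of that guard (retrieve is a hypothetical stand-in that
# mirrors the client's documented behavior):

def retrieve(request=None, *, registration=None):
    if request is not None and registration is not None:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    return request or {"registration": registration}


assert retrieve(registration="r")["registration"] == "r"
try:
    retrieve({"registration": "r"}, registration="r")
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError")
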
+ return_value = domains.AuthorizationCode( + code="code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reset_authorization_code(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +def test_reset_authorization_code_rest_required_fields( + request_type=domains.ResetAuthorizationCodeRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reset_authorization_code(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reset_authorization_code_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.reset_authorization_code._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("registration",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reset_authorization_code_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_reset_authorization_code" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_reset_authorization_code" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ResetAuthorizationCodeRequest.pb( + domains.ResetAuthorizationCodeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.AuthorizationCode.to_json( + domains.AuthorizationCode() + ) + + request = domains.ResetAuthorizationCodeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.AuthorizationCode() + + client.reset_authorization_code( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reset_authorization_code_rest_bad_request( + transport: str = "rest", request_type=domains.ResetAuthorizationCodeRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reset_authorization_code(request) + + +def test_reset_authorization_code_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.reset_authorization_code(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{registration=projects/*/locations/*/registrations/*}:resetAuthorizationCode" + % client.transport._host, + args[1], + ) + + +def test_reset_authorization_code_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reset_authorization_code( + domains.ResetAuthorizationCodeRequest(), + registration="registration_value", + ) + + +def test_reset_authorization_code_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DomainsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DomainsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
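
# Aside: the transport-selection tests that follow exercise a registry
# pattern: a string label maps to a transport class, and the transport's
# `kind` reports the label back. A toy model of that contract (class and
# helper names invented for the sketch):

class GrpcTransport:
    kind = "grpc"


class RestTransport:
    kind = "rest"


_TRANSPORTS = {"grpc": GrpcTransport, "rest": RestTransport}


def get_transport_class(label="grpc"):
    return _TRANSPORTS[label]


for name in ("grpc", "rest"):
    assert get_transport_class(name)().kind == name
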
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DomainsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DomainsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DomainsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DomainsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DomainsGrpcTransport, + transports.DomainsGrpcAsyncIOTransport, + transports.DomainsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DomainsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DomainsGrpcTransport, + ) + + +def test_domains_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DomainsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_domains_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.domains_v1.services.domains.transports.DomainsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DomainsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
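
# Aside: the base-transport check that follows verifies a simple contract:
# every RPC method on the abstract transport raises NotImplementedError
# until a concrete transport overrides it. A self-contained toy version of
# that contract (invented class, two representative methods):

class BaseTransport:
    def search_domains(self, request):
        raise NotImplementedError()

    def get_registration(self, request):
        raise NotImplementedError()


toy = BaseTransport()
for method in ("search_domains", "get_registration"):
    try:
        getattr(toy, method)(request=object())
    except NotImplementedError:
        continue
    raise AssertionError(f"{method} should not be implemented on the base")
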
+ methods = ( + "search_domains", + "retrieve_register_parameters", + "register_domain", + "retrieve_transfer_parameters", + "transfer_domain", + "list_registrations", + "get_registration", + "update_registration", + "configure_management_settings", + "configure_dns_settings", + "configure_contact_settings", + "export_registration", + "delete_registration", + "retrieve_authorization_code", + "reset_authorization_code", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_domains_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.domains_v1.services.domains.transports.DomainsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DomainsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_domains_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.domains_v1.services.domains.transports.DomainsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DomainsTransport() + adc.assert_called_once() + + +def test_domains_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DomainsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DomainsGrpcTransport, + transports.DomainsGrpcAsyncIOTransport, + ], +) +def test_domains_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
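
# Aside: every ADC test here shares one trick: google.auth.default is
# patched so no real credentials are ever looked up. A standalone version of
# that trick, assuming google-auth is installed (it is a dependency of these
# tests):

from unittest import mock

import google.auth
from google.auth import credentials as ga_credentials

with mock.patch.object(google.auth, "default", autospec=True) as adc:
    adc.return_value = (ga_credentials.AnonymousCredentials(), None)
    creds, project = google.auth.default()

adc.assert_called_once()
assert isinstance(creds, ga_credentials.AnonymousCredentials)
assert project is None
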
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DomainsGrpcTransport, + transports.DomainsGrpcAsyncIOTransport, + transports.DomainsRestTransport, + ], +) +def test_domains_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DomainsGrpcTransport, grpc_helpers), + (transports.DomainsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_domains_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "domains.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="domains.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.DomainsGrpcTransport, transports.DomainsGrpcAsyncIOTransport], +) +def test_domains_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
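
# Aside: the mTLS tests above revolve around a "client cert source": a
# zero-argument callable returning (certificate_chain, private_key) as
# bytes. A sketch of how such a callback is consumed; as in the tests,
# grpc.ssl_channel_credentials is mocked so no real TLS material is needed:

from unittest import mock


def cert_source():
    return b"cert bytes", b"key bytes"


with mock.patch("grpc.ssl_channel_credentials") as ssl_creds:
    cert, key = cert_source()
    ssl_creds(certificate_chain=cert, private_key=key)

ssl_creds.assert_called_once_with(
    certificate_chain=b"cert bytes", private_key=b"key bytes"
)
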
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
+def test_domains_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transports.DomainsRestTransport(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_domains_rest_lro_client():
+ client = DomainsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_domains_host_no_port(transport_name):
+ client = DomainsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="domains.googleapis.com"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "domains.googleapis.com:443"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://domains.googleapis.com"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_domains_host_with_port(transport_name):
+ client = DomainsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="domains.googleapis.com:8000"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "domains.googleapis.com:8000"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://domains.googleapis.com:8000"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "rest",
+ ],
+)
+def test_domains_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = DomainsClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = DomainsClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.search_domains._session
+ session2 = client2.transport.search_domains._session
+ assert session1 != session2
+ session1 = client1.transport.retrieve_register_parameters._session
+ session2 = client2.transport.retrieve_register_parameters._session
+ assert session1 != session2
+ session1 = client1.transport.register_domain._session
+ session2 = client2.transport.register_domain._session
+ assert session1 != session2
+ session1 = client1.transport.retrieve_transfer_parameters._session
+ session2 = client2.transport.retrieve_transfer_parameters._session
+ assert session1 != session2
+ session1 = client1.transport.transfer_domain._session
+ session2 = client2.transport.transfer_domain._session
+ assert session1 != session2
+ session1 = client1.transport.list_registrations._session
+ session2 = client2.transport.list_registrations._session
+ assert session1 != session2
+ session1 = client1.transport.get_registration._session
+ session2 = client2.transport.get_registration._session
+ assert session1 != session2
+ session1 = client1.transport.update_registration._session
+ session2 = client2.transport.update_registration._session
+ assert session1 != session2
+ session1 = client1.transport.configure_management_settings._session
+ session2 = client2.transport.configure_management_settings._session
+ assert session1 != session2
+ session1 = client1.transport.configure_dns_settings._session
+ session2 = client2.transport.configure_dns_settings._session
+ assert session1 != session2
+ session1 = client1.transport.configure_contact_settings._session
+ session2 = client2.transport.configure_contact_settings._session
+ assert session1 != session2
+ session1 = client1.transport.export_registration._session
+ session2 = client2.transport.export_registration._session
+ assert session1 != session2
+ session1 = client1.transport.delete_registration._session
+ session2 = client2.transport.delete_registration._session
+ assert session1 != session2
+ session1 = client1.transport.retrieve_authorization_code._session
+ session2 = client2.transport.retrieve_authorization_code._session
+ assert session1 != session2
+ session1 = client1.transport.reset_authorization_code._session
+ session2 = client2.transport.reset_authorization_code._session
+ assert session1 != session2
+
+
+def test_domains_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.DomainsGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_domains_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.DomainsGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
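
# Aside: the channel tests above also pin down a small normalization rule: a
# host given without a port gets ":443" appended, while an explicit port is
# kept. A sketch of that rule as a plain helper (hypothetical; the real
# logic lives inside the generated transports):

def normalize_host(host, default_port=443):
    return host if ":" in host else f"{host}:{default_port}"


assert normalize_host("squid.clam.whelk") == "squid.clam.whelk:443"
assert normalize_host("domains.googleapis.com:8000") == "domains.googleapis.com:8000"
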
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.DomainsGrpcTransport, transports.DomainsGrpcAsyncIOTransport],
+)
+def test_domains_transport_channel_mtls_with_client_cert_source(transport_class):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.DomainsGrpcTransport, transports.DomainsGrpcAsyncIOTransport],
+)
+def test_domains_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_domains_grpc_lro_client():
+ client = DomainsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_domains_grpc_lro_async_client():
+ client = DomainsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc_asyncio",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_registration_path(): + project = "squid" + location = "clam" + registration = "whelk" + expected = ( + "projects/{project}/locations/{location}/registrations/{registration}".format( + project=project, + location=location, + registration=registration, + ) + ) + actual = DomainsClient.registration_path(project, location, registration) + assert expected == actual + + +def test_parse_registration_path(): + expected = { + "project": "octopus", + "location": "oyster", + "registration": "nudibranch", + } + path = DomainsClient.registration_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_registration_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DomainsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = DomainsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DomainsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = DomainsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DomainsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = DomainsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = DomainsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = DomainsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DomainsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = DomainsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
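
# Aside: the path helpers under test come in reversible pairs: a format
# template builds the resource name and a pattern recovers its pieces. A
# stdlib-only sketch of that pairing (helper names invented; the generated
# client implements the same idea for every resource type):

import re

_TEMPLATE = "projects/{project}/locations/{location}"
_PATTERN = re.compile(r"^projects/(?P<project>[^/]+)/locations/(?P<location>[^/]+)$")


def location_path(project, location):
    return _TEMPLATE.format(project=project, location=location)


def parse_location_path(path):
    match = _PATTERN.match(path)
    return match.groupdict() if match else {}


expected = {"project": "oyster", "location": "nudibranch"}
assert parse_location_path(location_path(**expected)) == expected
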
+ actual = DomainsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DomainsTransport, "_prep_wrapped_messages" + ) as prep: + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DomainsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DomainsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DomainsClient, transports.DomainsGrpcTransport), + (DomainsAsyncClient, transports.DomainsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/__init__.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py new file mode 100644 index 000000000000..618b38d5f8e9 --- /dev/null +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py @@ -0,0 +1,9877 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import money_pb2 # type: ignore +from google.type import postal_address_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.domains_v1beta1.services.domains import ( + DomainsAsyncClient, + DomainsClient, + pagers, + transports, +) +from google.cloud.domains_v1beta1.types import domains + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert DomainsClient._get_default_mtls_endpoint(None) is None
+ assert DomainsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+ assert (
+ DomainsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+ )
+ assert (
+ DomainsClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ DomainsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert DomainsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_name",
+ [
+ (DomainsClient, "grpc"),
+ (DomainsAsyncClient, "grpc_asyncio"),
+ (DomainsClient, "rest"),
+ ],
+)
+def test_domains_client_from_service_account_info(client_class, transport_name):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info, transport=transport_name)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == (
+ "domains.googleapis.com:443"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://domains.googleapis.com"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class,transport_name",
+ [
+ (transports.DomainsGrpcTransport, "grpc"),
+ (transports.DomainsGrpcAsyncIOTransport, "grpc_asyncio"),
+ (transports.DomainsRestTransport, "rest"),
+ ],
+)
+def test_domains_client_service_account_always_use_jwt(transport_class, transport_name):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=False)
+ use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_name",
+ [
+ (DomainsClient, "grpc"),
+ (DomainsAsyncClient, "grpc_asyncio"),
+ (DomainsClient, "rest"),
+ ],
+)
+def test_domains_client_from_service_account_file(client_class, transport_name):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file(
+ "dummy/file/path.json", transport=transport_name
+ )
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json(
+ "dummy/file/path.json", transport=transport_name
+ )
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == (
+ "domains.googleapis.com:443"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://domains.googleapis.com"
+ )
+
+
+def test_domains_client_get_transport_class():
+ transport = DomainsClient.get_transport_class()
+ available_transports = [
+ transports.DomainsGrpcTransport,
+ transports.DomainsRestTransport,
+ ]
+ assert transport in available_transports
+
+ transport = DomainsClient.get_transport_class("grpc")
+ assert transport == transports.DomainsGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (DomainsClient, transports.DomainsGrpcTransport, "grpc"),
+ (DomainsAsyncClient, transports.DomainsGrpcAsyncIOTransport, "grpc_asyncio"),
+ (DomainsClient, transports.DomainsRestTransport, "rest"),
+ ],
+)
+@mock.patch.object(
+ DomainsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsClient)
+)
+@mock.patch.object(
+ DomainsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsAsyncClient)
+)
+def test_domains_client_client_options(client_class, transport_class, transport_name):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(DomainsClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(DomainsClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(transport=transport_name, client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc", "true"), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DomainsClient, transports.DomainsGrpcTransport, "grpc", "false"), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (DomainsClient, transports.DomainsRestTransport, "rest", "true"), + (DomainsClient, transports.DomainsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + DomainsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsClient) +) +@mock.patch.object( + DomainsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsAsyncClient) +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_domains_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [DomainsClient, DomainsAsyncClient]) +@mock.patch.object( + DomainsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsClient) +) +@mock.patch.object( + DomainsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DomainsAsyncClient) +) +def test_domains_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
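+    # An illustrative sketch (an assumption, not generated output): an explicit
+    # api_endpoint in the options short-circuits the env-var logic entirely, so
+    # both cases below expect it back unchanged.
+    assert client_options.ClientOptions(api_endpoint="foo").api_endpoint == "foo"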
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc"), + (DomainsAsyncClient, transports.DomainsGrpcAsyncIOTransport, "grpc_asyncio"), + (DomainsClient, transports.DomainsRestTransport, "rest"), + ], +) +def test_domains_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc", grpc_helpers), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (DomainsClient, transports.DomainsRestTransport, "rest", None), + ], +) +def test_domains_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_domains_client_client_options_from_dict(): + with mock.patch( + "google.cloud.domains_v1beta1.services.domains.transports.DomainsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DomainsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DomainsClient, transports.DomainsGrpcTransport, "grpc", grpc_helpers), + ( + DomainsAsyncClient, + transports.DomainsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_domains_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
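+    # An illustrative sketch (an assumption, not generated output): the options
+    # object only records the path; resolving it is deferred to
+    # google.auth.load_credentials_from_file, which is mocked out below.
+    assert options.credentials_file == "credentials.json"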
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "domains.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="domains.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.SearchDomainsRequest, + dict, + ], +) +def test_search_domains(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.SearchDomainsResponse() + response = client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.SearchDomainsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.SearchDomainsResponse) + + +def test_search_domains_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + client.search_domains() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.SearchDomainsRequest() + + +@pytest.mark.asyncio +async def test_search_domains_async( + transport: str = "grpc_asyncio", request_type=domains.SearchDomainsRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.SearchDomainsResponse() + ) + response = await client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.SearchDomainsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.SearchDomainsResponse) + + +@pytest.mark.asyncio +async def test_search_domains_async_from_dict(): + await test_search_domains_async(request_type=dict) + + +def test_search_domains_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.SearchDomainsRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + call.return_value = domains.SearchDomainsResponse() + client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_domains_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.SearchDomainsRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.SearchDomainsResponse() + ) + await client.search_domains(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +def test_search_domains_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.SearchDomainsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_domains( + location="location_value", + query="query_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + + +def test_search_domains_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
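+    # An illustrative sketch (an assumption; the local name is for illustration
+    # only): flattened kwargs are shorthand that populates a fresh request, so
+    # combining them with an explicit request object would be ambiguous.
+    shorthand = domains.SearchDomainsRequest(location="location_value")
+    assert shorthand.location == "location_value"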
+ with pytest.raises(ValueError): + client.search_domains( + domains.SearchDomainsRequest(), + location="location_value", + query="query_value", + ) + + +@pytest.mark.asyncio +async def test_search_domains_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_domains), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.SearchDomainsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.SearchDomainsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_domains( + location="location_value", + query="query_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_search_domains_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.search_domains( + domains.SearchDomainsRequest(), + location="location_value", + query="query_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RetrieveRegisterParametersRequest, + dict, + ], +) +def test_retrieve_register_parameters(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.RetrieveRegisterParametersResponse() + response = client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveRegisterParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveRegisterParametersResponse) + + +def test_retrieve_register_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + client.retrieve_register_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveRegisterParametersRequest() + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_async( + transport: str = "grpc_asyncio", + request_type=domains.RetrieveRegisterParametersRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveRegisterParametersResponse() + ) + response = await client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveRegisterParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveRegisterParametersResponse) + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_async_from_dict(): + await test_retrieve_register_parameters_async(request_type=dict) + + +def test_retrieve_register_parameters_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveRegisterParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + call.return_value = domains.RetrieveRegisterParametersResponse() + client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveRegisterParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveRegisterParametersResponse() + ) + await client.retrieve_register_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
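+    # An illustrative sketch (an assumption about google.api_core internals):
+    # the expected header pair can be rebuilt directly from the routing field.
+    assert gapic_v1.routing_header.to_grpc_metadata(
+        (("location", "location_value"),)
+    ) == ("x-goog-request-params", "location=location_value")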
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +def test_retrieve_register_parameters_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.RetrieveRegisterParametersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.retrieve_register_parameters( + location="location_value", + domain_name="domain_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].domain_name + mock_val = "domain_name_value" + assert arg == mock_val + + +def test_retrieve_register_parameters_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_register_parameters( + domains.RetrieveRegisterParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_register_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.RetrieveRegisterParametersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveRegisterParametersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.retrieve_register_parameters( + location="location_value", + domain_name="domain_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].domain_name + mock_val = "domain_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_retrieve_register_parameters_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.retrieve_register_parameters( + domains.RetrieveRegisterParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RegisterDomainRequest, + dict, + ], +) +def test_register_domain(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RegisterDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_register_domain_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + client.register_domain() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RegisterDomainRequest() + + +@pytest.mark.asyncio +async def test_register_domain_async( + transport: str = "grpc_asyncio", request_type=domains.RegisterDomainRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RegisterDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_register_domain_async_from_dict(): + await test_register_domain_async(request_type=dict) + + +def test_register_domain_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RegisterDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
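+    # An illustrative sketch (an assumption, not generated output): proto-plus
+    # requests accept the routing field after construction, and that value is
+    # what feeds the x-goog-request-params header checked below.
+    assert request.parent == "parent_value"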
+ with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_register_domain_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RegisterDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.register_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_register_domain_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.register_domain( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].yearly_price + mock_val = money_pb2.Money(currency_code="currency_code_value") + assert arg == mock_val + + +def test_register_domain_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.register_domain( + domains.RegisterDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + +@pytest.mark.asyncio +async def test_register_domain_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.register_domain), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.register_domain( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].yearly_price + mock_val = money_pb2.Money(currency_code="currency_code_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_register_domain_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.register_domain( + domains.RegisterDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RetrieveTransferParametersRequest, + dict, + ], +) +def test_retrieve_transfer_parameters(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.RetrieveTransferParametersResponse() + response = client.retrieve_transfer_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveTransferParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveTransferParametersResponse) + + +def test_retrieve_transfer_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + client.retrieve_transfer_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveTransferParametersRequest() + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_async( + transport: str = "grpc_asyncio", + request_type=domains.RetrieveTransferParametersRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveTransferParametersResponse() + ) + response = await client.retrieve_transfer_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveTransferParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveTransferParametersResponse) + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_async_from_dict(): + await test_retrieve_transfer_parameters_async(request_type=dict) + + +def test_retrieve_transfer_parameters_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveTransferParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + call.return_value = domains.RetrieveTransferParametersResponse() + client.retrieve_transfer_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveTransferParametersRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveTransferParametersResponse() + ) + await client.retrieve_transfer_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
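+    # An illustrative sketch (an assumption; the underscored locals are for
+    # illustration only): each mock_calls entry unpacks as (name, args, kwargs)
+    # and the routing header always travels in kwargs["metadata"].
+    _name, _args, _kwargs = call.mock_calls[0]
+    assert "metadata" in _kwargs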
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +def test_retrieve_transfer_parameters_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.RetrieveTransferParametersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.retrieve_transfer_parameters( + location="location_value", + domain_name="domain_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].domain_name + mock_val = "domain_name_value" + assert arg == mock_val + + +def test_retrieve_transfer_parameters_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_transfer_parameters( + domains.RetrieveTransferParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_transfer_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.RetrieveTransferParametersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.RetrieveTransferParametersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.retrieve_transfer_parameters( + location="location_value", + domain_name="domain_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + arg = args[0].domain_name + mock_val = "domain_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_retrieve_transfer_parameters_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.retrieve_transfer_parameters( + domains.RetrieveTransferParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.TransferDomainRequest, + dict, + ], +) +def test_transfer_domain(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.TransferDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_transfer_domain_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + client.transfer_domain() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.TransferDomainRequest() + + +@pytest.mark.asyncio +async def test_transfer_domain_async( + transport: str = "grpc_asyncio", request_type=domains.TransferDomainRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.TransferDomainRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_transfer_domain_async_from_dict(): + await test_transfer_domain_async(request_type=dict) + + +def test_transfer_domain_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.TransferDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_transfer_domain_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.TransferDomainRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.transfer_domain(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_transfer_domain_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.transfer_domain( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].yearly_price + mock_val = money_pb2.Money(currency_code="currency_code_value") + assert arg == mock_val + arg = args[0].authorization_code + mock_val = domains.AuthorizationCode(code="code_value") + assert arg == mock_val + + +def test_transfer_domain_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
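+    # An illustrative sketch (an assumption; "price" is for illustration only):
+    # the flattened arguments map onto typed request fields, including the
+    # well-known Money proto used for yearly_price.
+    price = money_pb2.Money(currency_code="USD", units=12)
+    assert price.currency_code == "USD" and price.units == 12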
+ with pytest.raises(ValueError): + client.transfer_domain( + domains.TransferDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + + +@pytest.mark.asyncio +async def test_transfer_domain_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.transfer_domain), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.transfer_domain( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].yearly_price + mock_val = money_pb2.Money(currency_code="currency_code_value") + assert arg == mock_val + arg = args[0].authorization_code + mock_val = domains.AuthorizationCode(code="code_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_transfer_domain_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.transfer_domain( + domains.TransferDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ListRegistrationsRequest, + dict, + ], +) +def test_list_registrations(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.ListRegistrationsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_registrations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ListRegistrationsRequest() + + # Establish that the response is the type that we expect. 
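+    # An illustrative note (an assumption, not generated output): list RPCs come
+    # back wrapped in a pager that lazily re-issues the call while a
+    # next_page_token remains, which the pager tests further below rely on.
+    assert hasattr(response, "pages")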
+ assert isinstance(response, pagers.ListRegistrationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_registrations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + client.list_registrations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ListRegistrationsRequest() + + +@pytest.mark.asyncio +async def test_list_registrations_async( + transport: str = "grpc_asyncio", request_type=domains.ListRegistrationsRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.ListRegistrationsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_registrations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ListRegistrationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRegistrationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_registrations_async_from_dict(): + await test_list_registrations_async(request_type=dict) + + +def test_list_registrations_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ListRegistrationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + call.return_value = domains.ListRegistrationsResponse() + client.list_registrations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_registrations_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ListRegistrationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.ListRegistrationsResponse() + ) + await client.list_registrations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_registrations_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.ListRegistrationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_registrations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_registrations_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_registrations( + domains.ListRegistrationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_registrations_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = domains.ListRegistrationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.ListRegistrationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_registrations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_registrations_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_registrations( + domains.ListRegistrationsRequest(), + parent="parent_value", + ) + + +def test_list_registrations_pager(transport_name: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + # Set the response to a series of pages. 
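+ # (Each element of side_effect below is consumed by one stub call; the
+ # trailing RuntimeError fails the test if the pager over-fetches.)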
+ call.side_effect = ( + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + domains.Registration(), + ], + next_page_token="abc", + ), + domains.ListRegistrationsResponse( + registrations=[], + next_page_token="def", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + ], + next_page_token="ghi", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_registrations(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, domains.Registration) for i in results) + + +def test_list_registrations_pages(transport_name: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + domains.Registration(), + ], + next_page_token="abc", + ), + domains.ListRegistrationsResponse( + registrations=[], + next_page_token="def", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + ], + next_page_token="ghi", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + ], + ), + RuntimeError, + ) + pages = list(client.list_registrations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_registrations_async_pager(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_registrations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + domains.Registration(), + ], + next_page_token="abc", + ), + domains.ListRegistrationsResponse( + registrations=[], + next_page_token="def", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + ], + next_page_token="ghi", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_registrations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, domains.Registration) for i in responses) + + +@pytest.mark.asyncio +async def test_list_registrations_async_pages(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
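+ # (new_callable=mock.AsyncMock is used here so that each mocked page
+ # call can be awaited by the async pager.)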
+ with mock.patch.object( + type(client.transport.list_registrations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + domains.Registration(), + ], + next_page_token="abc", + ), + domains.ListRegistrationsResponse( + registrations=[], + next_page_token="def", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + ], + next_page_token="ghi", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_registrations(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + domains.GetRegistrationRequest, + dict, + ], +) +def test_get_registration(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.Registration( + name="name_value", + domain_name="domain_name_value", + state=domains.Registration.State.REGISTRATION_PENDING, + issues=[domains.Registration.Issue.CONTACT_SUPPORT], + supported_privacy=[domains.ContactPrivacy.PUBLIC_CONTACT_DATA], + ) + response = client.get_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.GetRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.Registration) + assert response.name == "name_value" + assert response.domain_name == "domain_name_value" + assert response.state == domains.Registration.State.REGISTRATION_PENDING + assert response.issues == [domains.Registration.Issue.CONTACT_SUPPORT] + assert response.supported_privacy == [domains.ContactPrivacy.PUBLIC_CONTACT_DATA] + + +def test_get_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
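+ # (Patching __call__ on the type intercepts invocation of the stub's
+ # multicallable, since dunder lookups bypass the instance.)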
+ with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + client.get_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.GetRegistrationRequest() + + +@pytest.mark.asyncio +async def test_get_registration_async( + transport: str = "grpc_asyncio", request_type=domains.GetRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.Registration( + name="name_value", + domain_name="domain_name_value", + state=domains.Registration.State.REGISTRATION_PENDING, + issues=[domains.Registration.Issue.CONTACT_SUPPORT], + supported_privacy=[domains.ContactPrivacy.PUBLIC_CONTACT_DATA], + ) + ) + response = await client.get_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.GetRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.Registration) + assert response.name == "name_value" + assert response.domain_name == "domain_name_value" + assert response.state == domains.Registration.State.REGISTRATION_PENDING + assert response.issues == [domains.Registration.Issue.CONTACT_SUPPORT] + assert response.supported_privacy == [domains.ContactPrivacy.PUBLIC_CONTACT_DATA] + + +@pytest.mark.asyncio +async def test_get_registration_async_from_dict(): + await test_get_registration_async(request_type=dict) + + +def test_get_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.GetRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + call.return_value = domains.Registration() + client.get_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_registration_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.GetRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.Registration() + ) + await client.get_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_registration_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.Registration() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_registration( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_registration_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_registration( + domains.GetRegistrationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_registration_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_registration), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = domains.Registration() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.Registration() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_registration( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_registration_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_registration( + domains.GetRegistrationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.UpdateRegistrationRequest, + dict, + ], +) +def test_update_registration(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.UpdateRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + client.update_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.UpdateRegistrationRequest() + + +@pytest.mark.asyncio +async def test_update_registration_async( + transport: str = "grpc_asyncio", request_type=domains.UpdateRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.UpdateRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_registration_async_from_dict(): + await test_update_registration_async(request_type=dict) + + +def test_update_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.UpdateRegistrationRequest() + + request.registration.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
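+ # (Request routing parameters travel in the x-goog-request-params
+ # metadata entry, derived from fields of the request message.)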
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_registration_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.UpdateRegistrationRequest() + + request.registration.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration.name=name_value", + ) in kw["metadata"] + + +def test_update_registration_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_registration( + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_registration_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_registration( + domains.UpdateRegistrationRequest(), + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_registration_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
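+ # (The client assembles these keyword arguments into a single
+ # UpdateRegistrationRequest before invoking the transport.)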
+ response = await client.update_registration( + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = domains.Registration(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_registration_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_registration( + domains.UpdateRegistrationRequest(), + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureManagementSettingsRequest, + dict, + ], +) +def test_configure_management_settings(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureManagementSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_configure_management_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + client.configure_management_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureManagementSettingsRequest() + + +@pytest.mark.asyncio +async def test_configure_management_settings_async( + transport: str = "grpc_asyncio", + request_type=domains.ConfigureManagementSettingsRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureManagementSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_configure_management_settings_async_from_dict(): + await test_configure_management_settings_async(request_type=dict) + + +def test_configure_management_settings_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureManagementSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_configure_management_settings_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureManagementSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.configure_management_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +def test_configure_management_settings_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.configure_management_settings( + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + arg = args[0].management_settings + mock_val = domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_configure_management_settings_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_management_settings( + domains.ConfigureManagementSettingsRequest(), + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_configure_management_settings_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_management_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.configure_management_settings( + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + arg = args[0].management_settings + mock_val = domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_configure_management_settings_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
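+ # (Passing both is ambiguous, so the client raises ValueError before
+ # any RPC is attempted.)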
+ with pytest.raises(ValueError): + await client.configure_management_settings( + domains.ConfigureManagementSettingsRequest(), + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureDnsSettingsRequest, + dict, + ], +) +def test_configure_dns_settings(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureDnsSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_configure_dns_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + client.configure_dns_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureDnsSettingsRequest() + + +@pytest.mark.asyncio +async def test_configure_dns_settings_async( + transport: str = "grpc_asyncio", request_type=domains.ConfigureDnsSettingsRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureDnsSettingsRequest() + + # Establish that the response is the type that we expect. 
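+ # (Operation-returning methods come back wrapped in an api_core future;
+ # the test only checks the wrapper type, not completion.)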
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_configure_dns_settings_async_from_dict(): + await test_configure_dns_settings_async(request_type=dict) + + +def test_configure_dns_settings_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureDnsSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_configure_dns_settings_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureDnsSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.configure_dns_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +def test_configure_dns_settings_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.configure_dns_settings( + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + arg = args[0].dns_settings + mock_val = domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_configure_dns_settings_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_dns_settings( + domains.ConfigureDnsSettingsRequest(), + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_configure_dns_settings_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_dns_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.configure_dns_settings( + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + arg = args[0].dns_settings + mock_val = domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_configure_dns_settings_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.configure_dns_settings( + domains.ConfigureDnsSettingsRequest(), + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureContactSettingsRequest, + dict, + ], +) +def test_configure_contact_settings(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureContactSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_configure_contact_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + client.configure_contact_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureContactSettingsRequest() + + +@pytest.mark.asyncio +async def test_configure_contact_settings_async( + transport: str = "grpc_asyncio", + request_type=domains.ConfigureContactSettingsRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ConfigureContactSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_configure_contact_settings_async_from_dict(): + await test_configure_contact_settings_async(request_type=dict) + + +def test_configure_contact_settings_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureContactSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_configure_contact_settings_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ConfigureContactSettingsRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.configure_contact_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +def test_configure_contact_settings_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.configure_contact_settings( + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
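+ # (args[0] is the request the client built from the flattened kwargs;
+ # each field is checked against the value passed above.)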
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + arg = args[0].contact_settings + mock_val = domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_configure_contact_settings_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_contact_settings( + domains.ConfigureContactSettingsRequest(), + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_configure_contact_settings_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.configure_contact_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.configure_contact_settings( + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + arg = args[0].contact_settings + mock_val = domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_configure_contact_settings_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.configure_contact_settings( + domains.ConfigureContactSettingsRequest(), + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ExportRegistrationRequest, + dict, + ], +) +def test_export_registration(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
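+ # (request_type is parametrized as either the proto class or a dict;
+ # the client accepts both forms.)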
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ExportRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + client.export_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ExportRegistrationRequest() + + +@pytest.mark.asyncio +async def test_export_registration_async( + transport: str = "grpc_asyncio", request_type=domains.ExportRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ExportRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_registration_async_from_dict(): + await test_export_registration_async(request_type=dict) + + +def test_export_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ExportRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_registration_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ExportRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_export_registration_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_registration( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_export_registration_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_registration( + domains.ExportRegistrationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_export_registration_flattened_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.export_registration( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_export_registration_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.export_registration( + domains.ExportRegistrationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.DeleteRegistrationRequest, + dict, + ], +) +def test_delete_registration(request_type, transport: str = "grpc"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.DeleteRegistrationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_registration_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + client.delete_registration() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.DeleteRegistrationRequest() + + +@pytest.mark.asyncio +async def test_delete_registration_async( + transport: str = "grpc_asyncio", request_type=domains.DeleteRegistrationRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.DeleteRegistrationRequest() + + # Establish that the response is the type that we expect. 
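+    # delete_registration is a long-running operation, so the client wraps
+    # the raw operations_pb2.Operation in an api_core future; a caller would
+    # typically block on it with, e.g., response.result().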
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_registration_async_from_dict(): + await test_delete_registration_async(request_type=dict) + + +def test_delete_registration_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.DeleteRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_registration_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.DeleteRegistrationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_registration(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_registration_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_registration), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_registration( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_registration_flattened_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
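+    # Either form is fine on its own, i.e.
+    #   client.delete_registration(domains.DeleteRegistrationRequest(name=...))
+    #   client.delete_registration(name=...)
+    # but combining them, as below, must raise ValueError.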
+    with pytest.raises(ValueError):
+        client.delete_registration(
+            domains.DeleteRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_registration_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_registration(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_registration_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_registration(
+            domains.DeleteRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.RetrieveAuthorizationCodeRequest,
+        dict,
+    ],
+)
+def test_retrieve_authorization_code(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.AuthorizationCode(
+            code="code_value",
+        )
+        response = client.retrieve_authorization_code(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == domains.RetrieveAuthorizationCodeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, domains.AuthorizationCode)
+    assert response.code == "code_value"
+
+
+def test_retrieve_authorization_code_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
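+    # (Special methods such as __call__ are looked up on the type, not the
+    # instance, which is why the patch targets
+    # type(client.transport.retrieve_authorization_code) rather than the
+    # multicallable itself.)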
+ with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + client.retrieve_authorization_code() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveAuthorizationCodeRequest() + + +@pytest.mark.asyncio +async def test_retrieve_authorization_code_async( + transport: str = "grpc_asyncio", + request_type=domains.RetrieveAuthorizationCodeRequest, +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.AuthorizationCode( + code="code_value", + ) + ) + response = await client.retrieve_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.RetrieveAuthorizationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +@pytest.mark.asyncio +async def test_retrieve_authorization_code_async_from_dict(): + await test_retrieve_authorization_code_async(request_type=dict) + + +def test_retrieve_authorization_code_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveAuthorizationCodeRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + call.return_value = domains.AuthorizationCode() + client.retrieve_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "registration=registration_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_authorization_code_field_headers_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.RetrieveAuthorizationCodeRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_authorization_code), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.AuthorizationCode() + ) + await client.retrieve_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration=registration_value",
+    ) in kw["metadata"]
+
+
+def test_retrieve_authorization_code_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.AuthorizationCode()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.retrieve_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+
+
+def test_retrieve_authorization_code_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.retrieve_authorization_code(
+            domains.RetrieveAuthorizationCodeRequest(),
+            registration="registration_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_retrieve_authorization_code_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.AuthorizationCode()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.retrieve_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_retrieve_authorization_code_flattened_error_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.retrieve_authorization_code(
+            domains.RetrieveAuthorizationCodeRequest(),
+            registration="registration_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.ResetAuthorizationCodeRequest,
+        dict,
+    ],
+)
+def test_reset_authorization_code(request_type, transport: str = "grpc"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
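+        # (The sync transport can hand back the message directly; the async
+        # variants of these tests instead wrap it in
+        # grpc_helpers_async.FakeUnaryUnaryCall so the mocked stub returns
+        # an awaitable, mirroring grpc.aio behavior.)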
+ call.return_value = domains.AuthorizationCode( + code="code_value", + ) + response = client.reset_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ResetAuthorizationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +def test_reset_authorization_code_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_authorization_code), "__call__" + ) as call: + client.reset_authorization_code() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ResetAuthorizationCodeRequest() + + +@pytest.mark.asyncio +async def test_reset_authorization_code_async( + transport: str = "grpc_asyncio", request_type=domains.ResetAuthorizationCodeRequest +): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_authorization_code), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + domains.AuthorizationCode( + code="code_value", + ) + ) + response = await client.reset_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == domains.ResetAuthorizationCodeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +@pytest.mark.asyncio +async def test_reset_authorization_code_async_from_dict(): + await test_reset_authorization_code_async(request_type=dict) + + +def test_reset_authorization_code_field_headers(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = domains.ResetAuthorizationCodeRequest() + + request.registration = "registration_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_authorization_code), "__call__" + ) as call: + call.return_value = domains.AuthorizationCode() + client.reset_authorization_code(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration=registration_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_reset_authorization_code_field_headers_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = domains.ResetAuthorizationCodeRequest()
+
+    request.registration = "registration_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.AuthorizationCode()
+        )
+        await client.reset_authorization_code(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "registration=registration_value",
+    ) in kw["metadata"]
+
+
+def test_reset_authorization_code_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = domains.AuthorizationCode()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.reset_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].registration
+        mock_val = "registration_value"
+        assert arg == mock_val
+
+
+def test_reset_authorization_code_flattened_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.reset_authorization_code(
+            domains.ResetAuthorizationCodeRequest(),
+            registration="registration_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_reset_authorization_code_flattened_async():
+    client = DomainsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.reset_authorization_code), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            domains.AuthorizationCode()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.reset_authorization_code(
+            registration="registration_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].registration + mock_val = "registration_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_reset_authorization_code_flattened_error_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reset_authorization_code( + domains.ResetAuthorizationCodeRequest(), + registration="registration_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.SearchDomainsRequest, + dict, + ], +) +def test_search_domains_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.SearchDomainsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.SearchDomainsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_domains(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.SearchDomainsResponse) + + +def test_search_domains_rest_required_fields(request_type=domains.SearchDomainsRequest): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["query"] = "" + request_init["location"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_domains._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["query"] = "query_value" + jsonified_request["location"] = "location_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_domains._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
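+    # _get_unset_required_fields should now report only the required fields
+    # that travel as query parameters; "location" is a path parameter, so
+    # once populated it is consumed by URI transcoding rather than re-sent.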
+    assert not set(unset_fields) - set(("query",))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "query" in jsonified_request
+    assert jsonified_request["query"] == "query_value"
+    assert "location" in jsonified_request
+    assert jsonified_request["location"] == "location_value"
+
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = domains.SearchDomainsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = domains.SearchDomainsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.search_domains(request)
+
+            expected_params = [
+                (
+                    "query",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_search_domains_rest_unset_required_fields():
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.search_domains._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("query",))
+        & set(
+            (
+                "query",
+                "location",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_search_domains_rest_interceptors(null_interceptor):
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DomainsRestInterceptor(),
+    )
+    client = DomainsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DomainsRestInterceptor, "post_search_domains"
+    ) as post, mock.patch.object(
+        transports.DomainsRestInterceptor, "pre_search_domains"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = domains.SearchDomainsRequest.pb(domains.SearchDomainsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = domains.SearchDomainsResponse.to_json(
+            domains.SearchDomainsResponse()
+        )
+
+        request = domains.SearchDomainsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = domains.SearchDomainsResponse()
+
+        client.search_domains(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_search_domains_rest_bad_request(
+    transport: str = "rest", request_type=domains.SearchDomainsRequest
+):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"location": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.search_domains(request)
+
+
+def test_search_domains_rest_flattened():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = domains.SearchDomainsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"location": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            location="location_value",
+            query="query_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = domains.SearchDomainsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.search_domains(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1beta1/{location=projects/*/locations/*}/registrations:searchDomains"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_search_domains_rest_flattened_error(transport: str = "rest"):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.search_domains(
+            domains.SearchDomainsRequest(),
+            location="location_value",
+            query="query_value",
+        )
+
+
+def test_search_domains_rest_error():
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        domains.RetrieveRegisterParametersRequest,
+        dict,
+    ],
+)
+def test_retrieve_register_parameters_rest(request_type):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"location": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
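+        # (The REST tests fake the wire format end to end: the expected proto
+        # is serialized with json_format.MessageToJson and stuffed into a
+        # requests.Response via its private _content attribute, which is what
+        # the transport ultimately parses.)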
+ return_value = domains.RetrieveRegisterParametersResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveRegisterParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_register_parameters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveRegisterParametersResponse) + + +def test_retrieve_register_parameters_rest_required_fields( + request_type=domains.RetrieveRegisterParametersRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["domain_name"] = "" + request_init["location"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "domainName" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_register_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == request_init["domain_name"] + + jsonified_request["domainName"] = "domain_name_value" + jsonified_request["location"] = "location_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_register_parameters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("domain_name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == "domain_name_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveRegisterParametersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
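+            # (A real transcode() result is a dict carrying "uri", "method"
+            # and "query_params" entries, plus "body" for methods with an
+            # HTTP body; the stub below mimics that shape for a GET.)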
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = domains.RetrieveRegisterParametersResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.retrieve_register_parameters(request)
+
+            expected_params = [
+                (
+                    "domainName",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_retrieve_register_parameters_rest_unset_required_fields():
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.retrieve_register_parameters._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("domainName",))
+        & set(
+            (
+                "domainName",
+                "location",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_retrieve_register_parameters_rest_interceptors(null_interceptor):
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DomainsRestInterceptor(),
+    )
+    client = DomainsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DomainsRestInterceptor, "post_retrieve_register_parameters"
+    ) as post, mock.patch.object(
+        transports.DomainsRestInterceptor, "pre_retrieve_register_parameters"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = domains.RetrieveRegisterParametersRequest.pb(
+            domains.RetrieveRegisterParametersRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = domains.RetrieveRegisterParametersResponse.to_json(
+            domains.RetrieveRegisterParametersResponse()
+        )
+
+        request = domains.RetrieveRegisterParametersRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = domains.RetrieveRegisterParametersResponse()
+
+        client.retrieve_register_parameters(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_retrieve_register_parameters_rest_bad_request(
+    transport: str = "rest", request_type=domains.RetrieveRegisterParametersRequest
+):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"location": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_register_parameters(request) + + +def test_retrieve_register_parameters_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveRegisterParametersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + location="location_value", + domain_name="domain_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveRegisterParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retrieve_register_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{location=projects/*/locations/*}/registrations:retrieveRegisterParameters" + % client.transport._host, + args[1], + ) + + +def test_retrieve_register_parameters_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_register_parameters( + domains.RetrieveRegisterParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +def test_retrieve_register_parameters_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RegisterDomainRequest, + dict, + ], +) +def test_register_domain_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.register_domain(request) + + # Establish that the response is the type that we expect. 
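+    # The REST path still returns an api_core future; its last-known
+    # operations_pb2.Operation is exposed as response.operation, so the
+    # test inspects the proto's name directly.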
+ assert response.operation.name == "operations/spam" + + +def test_register_domain_rest_required_fields( + request_type=domains.RegisterDomainRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).register_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).register_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
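+            # (register_domain maps to an HTTP POST, so unlike the GET-based
+            # methods above, the transcode stub also carries a "body" entry
+            # populated from the request message.)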
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.register_domain(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_register_domain_rest_unset_required_fields():
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.register_domain._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "registration",
+                "yearlyPrice",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_register_domain_rest_interceptors(null_interceptor):
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DomainsRestInterceptor(),
+    )
+    client = DomainsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.DomainsRestInterceptor, "post_register_domain"
+    ) as post, mock.patch.object(
+        transports.DomainsRestInterceptor, "pre_register_domain"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = domains.RegisterDomainRequest.pb(domains.RegisterDomainRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = domains.RegisterDomainRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.register_domain(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_register_domain_rest_bad_request(
+    transport: str = "rest", request_type=domains.RegisterDomainRequest
+):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.register_domain(request) + + +def test_register_domain_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.register_domain(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/registrations:register" + % client.transport._host, + args[1], + ) + + +def test_register_domain_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.register_domain( + domains.RegisterDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + ) + + +def test_register_domain_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RetrieveTransferParametersRequest, + dict, + ], +) +def test_retrieve_transfer_parameters_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
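+        # (domains.RetrieveTransferParametersResponse is a proto-plus
+        # wrapper; its .pb() classmethod, used below, yields the raw
+        # protobuf message that json_format.MessageToJson can serialize.)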
+ return_value = domains.RetrieveTransferParametersResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveTransferParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_transfer_parameters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.RetrieveTransferParametersResponse) + + +def test_retrieve_transfer_parameters_rest_required_fields( + request_type=domains.RetrieveTransferParametersRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["domain_name"] = "" + request_init["location"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "domainName" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_transfer_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == request_init["domain_name"] + + jsonified_request["domainName"] = "domain_name_value" + jsonified_request["location"] = "location_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_transfer_parameters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("domain_name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "domainName" in jsonified_request + assert jsonified_request["domainName"] == "domain_name_value" + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveTransferParametersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = domains.RetrieveTransferParametersResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.retrieve_transfer_parameters(request)
+
+            expected_params = [
+                (
+                    "domainName",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_retrieve_transfer_parameters_rest_unset_required_fields():
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.retrieve_transfer_parameters._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("domainName",))
+        & set(
+            (
+                "domainName",
+                "location",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor):
+    transport = transports.DomainsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DomainsRestInterceptor(),
+    )
+    client = DomainsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DomainsRestInterceptor, "post_retrieve_transfer_parameters"
+    ) as post, mock.patch.object(
+        transports.DomainsRestInterceptor, "pre_retrieve_transfer_parameters"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = domains.RetrieveTransferParametersRequest.pb(
+            domains.RetrieveTransferParametersRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = domains.RetrieveTransferParametersResponse.to_json(
+            domains.RetrieveTransferParametersResponse()
+        )
+
+        request = domains.RetrieveTransferParametersRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = domains.RetrieveTransferParametersResponse()
+
+        client.retrieve_transfer_parameters(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_retrieve_transfer_parameters_rest_bad_request(
+    transport: str = "rest", request_type=domains.RetrieveTransferParametersRequest
+):
+    client = DomainsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"location": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
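+    # google.api_core maps HTTP status codes onto exception classes, so a
+    # mocked 400 response is expected to surface as
+    # core_exceptions.BadRequest without any further response body.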
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_transfer_parameters(request) + + +def test_retrieve_transfer_parameters_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.RetrieveTransferParametersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + location="location_value", + domain_name="domain_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.RetrieveTransferParametersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retrieve_transfer_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{location=projects/*/locations/*}/registrations:retrieveTransferParameters" + % client.transport._host, + args[1], + ) + + +def test_retrieve_transfer_parameters_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_transfer_parameters( + domains.RetrieveTransferParametersRequest(), + location="location_value", + domain_name="domain_name_value", + ) + + +def test_retrieve_transfer_parameters_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.TransferDomainRequest, + dict, + ], +) +def test_transfer_domain_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.transfer_domain(request) + + # Establish that the response is the type that we expect. 
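+    # transfer_domain returns a long-running operation: the client wraps the
+    # raw operations_pb2.Operation in an api_core future, so the name is
+    # checked through response.operation.name rather than response.name.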
+ assert response.operation.name == "operations/spam" + + +def test_transfer_domain_rest_required_fields( + request_type=domains.TransferDomainRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_domain._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
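+            # transfer_domain carries a request body (see the "body" key set
+            # below), so unlike the GET methods above, most request fields end
+            # up in the body instead of in the expected query params.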
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.transfer_domain(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_transfer_domain_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.transfer_domain._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "registration", + "yearlyPrice", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_transfer_domain_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_transfer_domain" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_transfer_domain" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.TransferDomainRequest.pb(domains.TransferDomainRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.TransferDomainRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.transfer_domain( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_transfer_domain_rest_bad_request( + transport: str = "rest", request_type=domains.TransferDomainRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.transfer_domain(request) + + +def test_transfer_domain_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.transfer_domain(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/registrations:transfer" + % client.transport._host, + args[1], + ) + + +def test_transfer_domain_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.transfer_domain( + domains.TransferDomainRequest(), + parent="parent_value", + registration=domains.Registration(name="name_value"), + yearly_price=money_pb2.Money(currency_code="currency_code_value"), + authorization_code=domains.AuthorizationCode(code="code_value"), + ) + + +def test_transfer_domain_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ListRegistrationsRequest, + dict, + ], +) +def test_list_registrations_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
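+        # A non-empty next_page_token is all this smoke test needs; full page
+        # iteration is exercised by test_list_registrations_rest_pager below.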
+ return_value = domains.ListRegistrationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.ListRegistrationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_registrations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRegistrationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_registrations_rest_required_fields( + request_type=domains.ListRegistrationsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_registrations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_registrations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.ListRegistrationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
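+            # list_registrations transcodes to a GET, so the optional
+            # filter/pageSize/pageToken fields ride in the query string; the
+            # "not mixing in" check above reflects exactly that set.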
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.ListRegistrationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_registrations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_registrations_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_registrations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_registrations_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_list_registrations" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_list_registrations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ListRegistrationsRequest.pb( + domains.ListRegistrationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.ListRegistrationsResponse.to_json( + domains.ListRegistrationsResponse() + ) + + request = domains.ListRegistrationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.ListRegistrationsResponse() + + client.list_registrations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_registrations_rest_bad_request( + transport: str = "rest", request_type=domains.ListRegistrationsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_registrations(request) + + +def test_list_registrations_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.ListRegistrationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.ListRegistrationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_registrations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/registrations" + % client.transport._host, + args[1], + ) + + +def test_list_registrations_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_registrations( + domains.ListRegistrationsRequest(), + parent="parent_value", + ) + + +def test_list_registrations_rest_pager(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + domains.Registration(), + ], + next_page_token="abc", + ), + domains.ListRegistrationsResponse( + registrations=[], + next_page_token="def", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + ], + next_page_token="ghi", + ), + domains.ListRegistrationsResponse( + registrations=[ + domains.Registration(), + domains.Registration(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(domains.ListRegistrationsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_registrations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, domains.Registration) for i in results) + + pages = list(client.list_registrations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + domains.GetRegistrationRequest, + dict, + ], +) +def test_get_registration_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.Registration( + name="name_value", + domain_name="domain_name_value", + state=domains.Registration.State.REGISTRATION_PENDING, + issues=[domains.Registration.Issue.CONTACT_SUPPORT], + supported_privacy=[domains.ContactPrivacy.PUBLIC_CONTACT_DATA], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.Registration.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_registration(request) + + # Establish that the response is the type that we expect. 
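+    # These fields should survive the MessageToJson round trip above
+    # unchanged, including the enum and repeated-enum values.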
+ assert isinstance(response, domains.Registration) + assert response.name == "name_value" + assert response.domain_name == "domain_name_value" + assert response.state == domains.Registration.State.REGISTRATION_PENDING + assert response.issues == [domains.Registration.Issue.CONTACT_SUPPORT] + assert response.supported_privacy == [domains.ContactPrivacy.PUBLIC_CONTACT_DATA] + + +def test_get_registration_rest_required_fields( + request_type=domains.GetRegistrationRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.Registration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.Registration.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_registration_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_registration._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_registration_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_get_registration" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_get_registration" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.GetRegistrationRequest.pb(domains.GetRegistrationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.Registration.to_json(domains.Registration()) + + request = domains.GetRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.Registration() + + client.get_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_registration_rest_bad_request( + transport: str = "rest", request_type=domains.GetRegistrationRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_registration(request) + + +def test_get_registration_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
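+    # The flattened-call test merges truthy keyword arguments into a sample
+    # request and then checks the resulting URI against the method's http
+    # rule template with path_template.validate.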
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.Registration() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.Registration.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/registrations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_registration_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_registration( + domains.GetRegistrationRequest(), + name="name_value", + ) + + +def test_get_registration_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.UpdateRegistrationRequest, + dict, + ], +) +def test_update_registration_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + } + request_init["registration"] = { + "name": "projects/sample1/locations/sample2/registrations/sample3", + "domain_name": "domain_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "expire_time": {}, + "state": 1, + "issues": [1], + "labels": {}, + "management_settings": {"renewal_method": 1, "transfer_lock_state": 1}, + "dns_settings": { + "custom_dns": { + "name_servers": ["name_servers_value1", "name_servers_value2"], + "ds_records": [ + { + "key_tag": 740, + "algorithm": 1, + "digest_type": 1, + "digest": "digest_value", + } + ], + }, + "google_domains_dns": { + "name_servers": ["name_servers_value1", "name_servers_value2"], + "ds_state": 1, + "ds_records": {}, + }, + "glue_records": [ + { + "host_name": "host_name_value", + "ipv4_addresses": [ + "ipv4_addresses_value1", + "ipv4_addresses_value2", + ], + "ipv6_addresses": [ + "ipv6_addresses_value1", + "ipv6_addresses_value2", + ], + } + ], + }, + "contact_settings": { + "privacy": 1, + "registrant_contact": { + "postal_address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", 
"recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone_number": "phone_number_value", + "fax_number": "fax_number_value", + }, + "admin_contact": {}, + "technical_contact": {}, + }, + "pending_contact_settings": {}, + "supported_privacy": [1], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_registration(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_registration_rest_required_fields( + request_type=domains.UpdateRegistrationRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_registration._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_registration_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_registration._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_registration_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_update_registration" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_update_registration" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.UpdateRegistrationRequest.pb( + domains.UpdateRegistrationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.UpdateRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_registration_rest_bad_request( + transport: str = "rest", request_type=domains.UpdateRegistrationRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + } + request_init["registration"] = { + "name": "projects/sample1/locations/sample2/registrations/sample3", + "domain_name": "domain_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "expire_time": {}, + "state": 1, + "issues": [1], + "labels": {}, + "management_settings": {"renewal_method": 1, "transfer_lock_state": 1}, + "dns_settings": { + "custom_dns": { + "name_servers": ["name_servers_value1", "name_servers_value2"], + "ds_records": [ + { + "key_tag": 740, + "algorithm": 1, + "digest_type": 1, + "digest": 
"digest_value", + } + ], + }, + "google_domains_dns": { + "name_servers": ["name_servers_value1", "name_servers_value2"], + "ds_state": 1, + "ds_records": {}, + }, + "glue_records": [ + { + "host_name": "host_name_value", + "ipv4_addresses": [ + "ipv4_addresses_value1", + "ipv4_addresses_value2", + ], + "ipv6_addresses": [ + "ipv6_addresses_value1", + "ipv6_addresses_value2", + ], + } + ], + }, + "contact_settings": { + "privacy": 1, + "registrant_contact": { + "postal_address": { + "revision": 879, + "region_code": "region_code_value", + "language_code": "language_code_value", + "postal_code": "postal_code_value", + "sorting_code": "sorting_code_value", + "administrative_area": "administrative_area_value", + "locality": "locality_value", + "sublocality": "sublocality_value", + "address_lines": ["address_lines_value1", "address_lines_value2"], + "recipients": ["recipients_value1", "recipients_value2"], + "organization": "organization_value", + }, + "email": "email_value", + "phone_number": "phone_number_value", + "fax_number": "fax_number_value", + }, + "admin_contact": {}, + "technical_contact": {}, + }, + "pending_contact_settings": {}, + "supported_privacy": [1], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_registration(request) + + +def test_update_registration_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{registration.name=projects/*/locations/*/registrations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_registration_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_registration( + domains.UpdateRegistrationRequest(), + registration=domains.Registration(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_registration_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureManagementSettingsRequest, + dict, + ], +) +def test_configure_management_settings_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.configure_management_settings(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_configure_management_settings_rest_required_fields( + request_type=domains.ConfigureManagementSettingsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_management_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_management_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.configure_management_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_configure_management_settings_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.configure_management_settings._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "registration", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_configure_management_settings_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_management_settings" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_configure_management_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ConfigureManagementSettingsRequest.pb( + domains.ConfigureManagementSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ConfigureManagementSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.configure_management_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_configure_management_settings_rest_bad_request( + transport: str = "rest", request_type=domains.ConfigureManagementSettingsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.configure_management_settings(request) + + +def test_configure_management_settings_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.configure_management_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{registration=projects/*/locations/*/registrations/*}:configureManagementSettings" + % client.transport._host, + args[1], + ) + + +def test_configure_management_settings_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_management_settings( + domains.ConfigureManagementSettingsRequest(), + registration="registration_value", + management_settings=domains.ManagementSettings( + renewal_method=domains.ManagementSettings.RenewalMethod.AUTOMATIC_RENEWAL + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_configure_management_settings_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureDnsSettingsRequest, + dict, + ], +) +def test_configure_dns_settings_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.configure_dns_settings(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_configure_dns_settings_rest_required_fields( + request_type=domains.ConfigureDnsSettingsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_dns_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_dns_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.configure_dns_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_configure_dns_settings_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.configure_dns_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "registration", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_configure_dns_settings_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_dns_settings" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_configure_dns_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ConfigureDnsSettingsRequest.pb( + domains.ConfigureDnsSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ConfigureDnsSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.configure_dns_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_configure_dns_settings_rest_bad_request( + transport: str = "rest", request_type=domains.ConfigureDnsSettingsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.configure_dns_settings(request) + + +def test_configure_dns_settings_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.configure_dns_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{registration=projects/*/locations/*/registrations/*}:configureDnsSettings" + % client.transport._host, + args[1], + ) + + +def test_configure_dns_settings_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_dns_settings( + domains.ConfigureDnsSettingsRequest(), + registration="registration_value", + dns_settings=domains.DnsSettings( + custom_dns=domains.DnsSettings.CustomDns( + name_servers=["name_servers_value"] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_configure_dns_settings_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ConfigureContactSettingsRequest, + dict, + ], +) +def test_configure_contact_settings_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.configure_contact_settings(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_configure_contact_settings_rest_required_fields( + request_type=domains.ConfigureContactSettingsRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_contact_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).configure_contact_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.configure_contact_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_configure_contact_settings_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.configure_contact_settings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "registration", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_configure_contact_settings_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_contact_settings" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_configure_contact_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ConfigureContactSettingsRequest.pb( + domains.ConfigureContactSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ConfigureContactSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.configure_contact_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_configure_contact_settings_rest_bad_request( + transport: str = "rest", request_type=domains.ConfigureContactSettingsRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.configure_contact_settings(request) + + +def test_configure_contact_settings_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.configure_contact_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{registration=projects/*/locations/*/registrations/*}:configureContactSettings" + % client.transport._host, + args[1], + ) + + +def test_configure_contact_settings_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.configure_contact_settings( + domains.ConfigureContactSettingsRequest(), + registration="registration_value", + contact_settings=domains.ContactSettings( + privacy=domains.ContactPrivacy.PUBLIC_CONTACT_DATA + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_configure_contact_settings_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ExportRegistrationRequest, + dict, + ], +) +def test_export_registration_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_registration(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_registration_rest_required_fields( + request_type=domains.ExportRegistrationRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_registration_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_registration._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_registration_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_export_registration" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_export_registration" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ExportRegistrationRequest.pb( + domains.ExportRegistrationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.ExportRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_registration_rest_bad_request( + transport: str = "rest", request_type=domains.ExportRegistrationRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_registration(request) + + +def test_export_registration_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.export_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/registrations/*}:export" + % client.transport._host, + args[1], + ) + + +def test_export_registration_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_registration( + domains.ExportRegistrationRequest(), + name="name_value", + ) + + +def test_export_registration_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.DeleteRegistrationRequest, + dict, + ], +) +def test_delete_registration_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_registration(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_registration_rest_required_fields( + request_type=domains.DeleteRegistrationRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_registration(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_registration_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_registration._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_registration_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DomainsRestInterceptor, "post_delete_registration" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_delete_registration" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.DeleteRegistrationRequest.pb( + domains.DeleteRegistrationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = domains.DeleteRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_registration_rest_bad_request( + transport: str = "rest", request_type=domains.DeleteRegistrationRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/registrations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_registration(request) + + +def test_delete_registration_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_registration(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/registrations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_registration_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_registration( + domains.DeleteRegistrationRequest(), + name="name_value", + ) + + +def test_delete_registration_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.RetrieveAuthorizationCodeRequest, + dict, + ], +) +def test_retrieve_authorization_code_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode( + code="code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retrieve_authorization_code(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +def test_retrieve_authorization_code_rest_required_fields( + request_type=domains.RetrieveAuthorizationCodeRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retrieve_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retrieve_authorization_code(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retrieve_authorization_code_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.retrieve_authorization_code._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("registration",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retrieve_authorization_code_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_retrieve_authorization_code" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_retrieve_authorization_code" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.RetrieveAuthorizationCodeRequest.pb( + domains.RetrieveAuthorizationCodeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.AuthorizationCode.to_json( + domains.AuthorizationCode() + ) + + request = domains.RetrieveAuthorizationCodeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.AuthorizationCode() + + client.retrieve_authorization_code( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retrieve_authorization_code_rest_bad_request( + transport: str = "rest", request_type=domains.RetrieveAuthorizationCodeRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retrieve_authorization_code(request) + + +def test_retrieve_authorization_code_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retrieve_authorization_code(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{registration=projects/*/locations/*/registrations/*}:retrieveAuthorizationCode" + % client.transport._host, + args[1], + ) + + +def test_retrieve_authorization_code_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retrieve_authorization_code( + domains.RetrieveAuthorizationCodeRequest(), + registration="registration_value", + ) + + +def test_retrieve_authorization_code_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + domains.ResetAuthorizationCodeRequest, + dict, + ], +) +def test_reset_authorization_code_rest(request_type): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
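+ # (resetting is expected to issue a fresh authorization code; the test
+ # fakes the returned AuthorizationCode JSON just as the retrieve test does.)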
+ return_value = domains.AuthorizationCode( + code="code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reset_authorization_code(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, domains.AuthorizationCode) + assert response.code == "code_value" + + +def test_reset_authorization_code_rest_required_fields( + request_type=domains.ResetAuthorizationCodeRequest, +): + transport_class = transports.DomainsRestTransport + + request_init = {} + request_init["registration"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["registration"] = "registration_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_authorization_code._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "registration" in jsonified_request + assert jsonified_request["registration"] == "registration_value" + + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reset_authorization_code(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reset_authorization_code_rest_unset_required_fields(): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.reset_authorization_code._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("registration",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reset_authorization_code_rest_interceptors(null_interceptor): + transport = transports.DomainsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DomainsRestInterceptor(), + ) + client = DomainsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DomainsRestInterceptor, "post_reset_authorization_code" + ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "pre_reset_authorization_code" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = domains.ResetAuthorizationCodeRequest.pb( + domains.ResetAuthorizationCodeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = domains.AuthorizationCode.to_json( + domains.AuthorizationCode() + ) + + request = domains.ResetAuthorizationCodeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = domains.AuthorizationCode() + + client.reset_authorization_code( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reset_authorization_code_rest_bad_request( + transport: str = "rest", request_type=domains.ResetAuthorizationCodeRequest +): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reset_authorization_code(request) + + +def test_reset_authorization_code_rest_flattened(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = domains.AuthorizationCode() + + # get arguments that satisfy an http rule for this method + sample_request = { + "registration": "projects/sample1/locations/sample2/registrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + registration="registration_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = domains.AuthorizationCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.reset_authorization_code(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{registration=projects/*/locations/*/registrations/*}:resetAuthorizationCode" + % client.transport._host, + args[1], + ) + + +def test_reset_authorization_code_rest_flattened_error(transport: str = "rest"): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reset_authorization_code( + domains.ResetAuthorizationCodeRequest(), + registration="registration_value", + ) + + +def test_reset_authorization_code_rest_error(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DomainsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DomainsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DomainsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DomainsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DomainsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DomainsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DomainsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DomainsGrpcTransport, + transports.DomainsGrpcAsyncIOTransport, + transports.DomainsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DomainsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DomainsGrpcTransport, + ) + + +def test_domains_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DomainsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_domains_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.domains_v1beta1.services.domains.transports.DomainsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DomainsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "search_domains", + "retrieve_register_parameters", + "register_domain", + "retrieve_transfer_parameters", + "transfer_domain", + "list_registrations", + "get_registration", + "update_registration", + "configure_management_settings", + "configure_dns_settings", + "configure_contact_settings", + "export_registration", + "delete_registration", + "retrieve_authorization_code", + "reset_authorization_code", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_domains_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.domains_v1beta1.services.domains.transports.DomainsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DomainsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_domains_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.domains_v1beta1.services.domains.transports.DomainsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DomainsTransport() + adc.assert_called_once() + + +def test_domains_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DomainsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DomainsGrpcTransport, + transports.DomainsGrpcAsyncIOTransport, + ], +) +def test_domains_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DomainsGrpcTransport, + transports.DomainsGrpcAsyncIOTransport, + transports.DomainsRestTransport, + ], +) +def test_domains_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DomainsGrpcTransport, grpc_helpers), + (transports.DomainsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_domains_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "domains.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="domains.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.DomainsGrpcTransport, transports.DomainsGrpcAsyncIOTransport], +) +def test_domains_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_domains_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DomainsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_domains_rest_lro_client(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_domains_host_no_port(transport_name): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="domains.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "domains.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://domains.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_domains_host_with_port(transport_name): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="domains.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "domains.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://domains.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_domains_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DomainsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DomainsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.search_domains._session + session2 = client2.transport.search_domains._session + assert session1 != session2 + session1 = client1.transport.retrieve_register_parameters._session + session2 = client2.transport.retrieve_register_parameters._session + assert session1 != session2 + session1 = client1.transport.register_domain._session + session2 = client2.transport.register_domain._session + assert session1 != session2 + session1 = client1.transport.retrieve_transfer_parameters._session + session2 = client2.transport.retrieve_transfer_parameters._session + assert session1 != session2 + session1 = client1.transport.transfer_domain._session + session2 = client2.transport.transfer_domain._session + assert session1 != session2 + session1 
= client1.transport.list_registrations._session + session2 = client2.transport.list_registrations._session + assert session1 != session2 + session1 = client1.transport.get_registration._session + session2 = client2.transport.get_registration._session + assert session1 != session2 + session1 = client1.transport.update_registration._session + session2 = client2.transport.update_registration._session + assert session1 != session2 + session1 = client1.transport.configure_management_settings._session + session2 = client2.transport.configure_management_settings._session + assert session1 != session2 + session1 = client1.transport.configure_dns_settings._session + session2 = client2.transport.configure_dns_settings._session + assert session1 != session2 + session1 = client1.transport.configure_contact_settings._session + session2 = client2.transport.configure_contact_settings._session + assert session1 != session2 + session1 = client1.transport.export_registration._session + session2 = client2.transport.export_registration._session + assert session1 != session2 + session1 = client1.transport.delete_registration._session + session2 = client2.transport.delete_registration._session + assert session1 != session2 + session1 = client1.transport.retrieve_authorization_code._session + session2 = client2.transport.retrieve_authorization_code._session + assert session1 != session2 + session1 = client1.transport.reset_authorization_code._session + session2 = client2.transport.reset_authorization_code._session + assert session1 != session2 + + +def test_domains_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DomainsGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_domains_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DomainsGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from the grpc/grpc_asyncio transport constructor. 
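+# The two tests below verify that the deprecated arguments still produce an mTLS channel +# against the api_mtls_endpoint, and that a DeprecationWarning is emitted while doing so.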
+@pytest.mark.parametrize( + "transport_class", + [transports.DomainsGrpcTransport, transports.DomainsGrpcAsyncIOTransport], +) +def test_domains_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from the grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.DomainsGrpcTransport, transports.DomainsGrpcAsyncIOTransport], +) +def test_domains_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_domains_grpc_lro_client(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property return the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_domains_grpc_lro_async_client(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
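+ # For the async transport, the property must expose operations_v1.OperationsAsyncClient + # rather than the synchronous OperationsClient.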
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property return the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_registration_path(): + project = "squid" + location = "clam" + registration = "whelk" + expected = ( + "projects/{project}/locations/{location}/registrations/{registration}".format( + project=project, + location=location, + registration=registration, + ) + ) + actual = DomainsClient.registration_path(project, location, registration) + assert expected == actual + + +def test_parse_registration_path(): + expected = { + "project": "octopus", + "location": "oyster", + "registration": "nudibranch", + } + path = DomainsClient.registration_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_registration_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DomainsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = DomainsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DomainsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = DomainsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DomainsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = DomainsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = DomainsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = DomainsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DomainsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DomainsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = DomainsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DomainsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DomainsTransport, "_prep_wrapped_messages" + ) as prep: + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DomainsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DomainsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DomainsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DomainsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
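+ # close() must not fire until the context manager exits.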
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DomainsClient, transports.DomainsGrpcTransport), + (DomainsAsyncClient, transports.DomainsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-enterpriseknowledgegraph/.coveragerc b/packages/google-cloud-enterpriseknowledgegraph/.coveragerc index 21c7c49e2744..7315fb4cdbf6 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/.coveragerc +++ b/packages/google-cloud-enterpriseknowledgegraph/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/enterpriseknowledgegraph/__init__.py + google/cloud/enterpriseknowledgegraph/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md b/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md index 20a126c4e65a..daa89271fcd7 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md +++ b/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.3.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-enterpriseknowledgegraph-v0.3.1...google-cloud-enterpriseknowledgegraph-v0.3.2) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + ## [0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-enterpriseknowledgegraph-v0.3.0...google-cloud-enterpriseknowledgegraph-v0.3.1) (2023-01-20) diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py index 056efd287f1d..553d98052667 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. 
# -__version__ = "0.3.1" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/__init__.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/__init__.py index 3c14141bc1b2..23916ae19e2f 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/__init__.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.enterpriseknowledgegraph import gapic_version as package_version +from google.cloud.enterpriseknowledgegraph_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py index e94c45aea923..553d98052667 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.3.2" # {x-release-please-version} diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py index 7a73d79aa8b2..4adf556a0f7f 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming @@ -418,7 +418,7 @@ class _CancelEntityReconciliationJob(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("CancelEntityReconciliationJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -443,7 +443,6 @@ def __call__( request (~.service.CancelEntityReconciliationJobRequest): The request object. Request message for CancelEntityReconciliationJob. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -506,7 +505,7 @@ class _CreateEntityReconciliationJob(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("CreateEntityReconciliationJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -531,7 +530,6 @@ def __call__( request (~.service.CreateEntityReconciliationJobRequest): The request object. Request message for CreateEntityReconciliationJob. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -606,7 +604,7 @@ class _DeleteEntityReconciliationJob(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("DeleteEntityReconciliationJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -631,7 +629,6 @@ def __call__( request (~.service.DeleteEntityReconciliationJobRequest): The request object. Request message for DeleteEntityReconciliationJob. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -685,7 +682,7 @@ class _GetEntityReconciliationJob(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("GetEntityReconciliationJob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -710,7 +707,6 @@ def __call__( request (~.service.GetEntityReconciliationJobRequest): The request object. Request message for GetEntityReconciliationJob. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -776,7 +772,7 @@ class _ListEntityReconciliationJobs(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("ListEntityReconciliationJobs") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -801,7 +797,6 @@ def __call__( request (~.service.ListEntityReconciliationJobsRequest): The request object. Request message for [EnterpriseKnowledgeGraphService.ListEntityReconciliationJobs][google.cloud.enterpriseknowledgegraph.v1.EnterpriseKnowledgeGraphService.ListEntityReconciliationJobs]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -869,7 +864,7 @@ class _Lookup(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("Lookup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "ids": "", } @@ -895,7 +890,6 @@ def __call__( request (~.service.LookupRequest): The request object. Request message for [EnterpriseKnowledgeGraphService.Lookup][google.cloud.enterpriseknowledgegraph.v1.EnterpriseKnowledgeGraphService.Lookup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -961,7 +955,7 @@ class _LookupPublicKg(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("LookupPublicKg") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "ids": "", } @@ -987,7 +981,6 @@ def __call__( request (~.service.LookupPublicKgRequest): The request object. Request message for [EnterpriseKnowledgeGraphService.LookupPublicKg][google.cloud.enterpriseknowledgegraph.v1.EnterpriseKnowledgeGraphService.LookupPublicKg]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1055,7 +1048,7 @@ class _Search(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("Search") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "query": "", } @@ -1081,7 +1074,6 @@ def __call__( request (~.service.SearchRequest): The request object. Request message for [EnterpriseKnowledgeGraphService.Search][google.cloud.enterpriseknowledgegraph.v1.EnterpriseKnowledgeGraphService.Search]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1147,7 +1139,7 @@ class _SearchPublicKg(EnterpriseKnowledgeGraphServiceRestStub): def __hash__(self): return hash("SearchPublicKg") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "query": "", } @@ -1173,7 +1165,6 @@ def __call__( request (~.service.SearchPublicKgRequest): The request object. Request message for [EnterpriseKnowledgeGraphService.Search][google.cloud.enterpriseknowledgegraph.v1.EnterpriseKnowledgeGraphService.Search]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/job_state.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/job_state.py index 586b4323f28b..a5acfe57604f 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/job_state.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/job_state.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/operation_metadata.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/operation_metadata.py index a4e3d5f6e9e1..ee0a21d3b840 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/operation_metadata.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/operation_metadata.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py index de872e8060c7..5fed4ff4d09d 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/types/service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import struct_pb2 # type: ignore diff --git a/packages/google-cloud-enterpriseknowledgegraph/noxfile.py b/packages/google-cloud-enterpriseknowledgegraph/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/noxfile.py +++ b/packages/google-cloud-enterpriseknowledgegraph/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json b/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json index d44a6559a82b..1ca25b3e7f26 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json +++ b/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-enterpriseknowledgegraph", - "version": "0.3.1" + "version": "0.3.2" }, "snippets": [ { diff --git a/packages/google-cloud-enterpriseknowledgegraph/setup.py b/packages/google-cloud-enterpriseknowledgegraph/setup.py index 246f50157e2f..b2bcdd13f525 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/setup.py +++ b/packages/google-cloud-enterpriseknowledgegraph/setup.py @@ -57,9 +57,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, diff --git a/packages/google-cloud-iap/.OwlBot.yaml b/packages/google-cloud-iap/.OwlBot.yaml new file mode 100644 index 000000000000..727d1c4052bb --- /dev/null +++ b/packages/google-cloud-iap/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-preserve-regex: + - /owl-bot-staging/google-cloud-iap/v1beta1 + +deep-copy-regex: + - source: /google/cloud/iap/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-iap/$1 + +begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/packages/google-cloud-iap/.coveragerc b/packages/google-cloud-iap/.coveragerc new file mode 100644 index 000000000000..46e950e71ee3 --- /dev/null +++ b/packages/google-cloud-iap/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/iap/__init__.py + google/cloud/iap/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-iap/.flake8 b/packages/google-cloud-iap/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-iap/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-iap/.gitignore b/packages/google-cloud-iap/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-iap/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-iap/.repo-metadata.json b/packages/google-cloud-iap/.repo-metadata.json new file mode 100644 index 000000000000..973bdf74e8f5 --- /dev/null +++ b/packages/google-cloud-iap/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "iap", + "name_pretty": "Identity-Aware Proxy", + "product_documentation": "https://cloud.google.com/iap", + "client_documentation": "https://cloud.google.com/python/docs/reference/iap/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-iap", + "api_id": "iap.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "iap", + "api_description": "Identity-Aware Proxy includes a number of features that can be used to protect access to Google Cloud hosted resources and applications hosted on Google Cloud." 
+} diff --git a/packages/google-cloud-iap/CHANGELOG.md b/packages/google-cloud-iap/CHANGELOG.md new file mode 100644 index 000000000000..2ce71f1169fb --- /dev/null +++ b/packages/google-cloud-iap/CHANGELOG.md @@ -0,0 +1,249 @@ +# Changelog + +## [1.10.1](https://github.com/googleapis/python-iap/compare/v1.10.0...v1.10.1) (2023-04-01) + + +### Documentation + +* Minor changes in AttributePropagationSettings ([#168](https://github.com/googleapis/python-iap/issues/168)) ([2c61c43](https://github.com/googleapis/python-iap/commit/2c61c438f6e254fea0662589f7047d2c8464d9f2)) + +## [1.10.0](https://github.com/googleapis/python-iap/compare/v1.9.0...v1.10.0) (2023-03-23) + + +### Features + +* Add an enum ENROLLED_SECOND_FACTORS under IapSettings ([8890e9e](https://github.com/googleapis/python-iap/commit/8890e9eceb2cfc41c3fee269d2e084052e783c5e)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#167](https://github.com/googleapis/python-iap/issues/167)) ([a95d454](https://github.com/googleapis/python-iap/commit/a95d4548860e8bf8f3a6e269c9b3882cda42d364)) +* Update doc description for field_mask ([8890e9e](https://github.com/googleapis/python-iap/commit/8890e9eceb2cfc41c3fee269d2e084052e783c5e)) + +## [1.9.0](https://github.com/googleapis/python-iap/compare/v1.8.1...v1.9.0) (2023-02-19) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#157](https://github.com/googleapis/python-iap/issues/157)) ([15be642](https://github.com/googleapis/python-iap/commit/15be6424ef580e8f02855a7f939a331a42236cd1)) + +## [1.8.1](https://github.com/googleapis/python-iap/compare/v1.8.0...v1.8.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([6d93973](https://github.com/googleapis/python-iap/commit/6d9397372916d91bb156f8995f3741c98729aa84)) + + +### Documentation + +* Add documentation for enums ([6d93973](https://github.com/googleapis/python-iap/commit/6d9397372916d91bb156f8995f3741c98729aa84)) + +## [1.8.0](https://github.com/googleapis/python-iap/compare/v1.7.0...v1.8.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#149](https://github.com/googleapis/python-iap/issues/149)) ([e136d2f](https://github.com/googleapis/python-iap/commit/e136d2fcd7e12c811972a2ff0fe819154e9cfca2)) + +## [1.7.0](https://github.com/googleapis/python-iap/compare/v1.6.0...v1.7.0) (2023-01-04) + + +### Features + +* Add AllowedDomainSettings to the UpdateIapSettingsRequest ([1be4844](https://github.com/googleapis/python-iap/commit/1be48441b21823dbc1ac53f7da7290da7bb66352)) +* Add AttributePropagationSettings to the UpdateIapSettingsRequest ([1be4844](https://github.com/googleapis/python-iap/commit/1be48441b21823dbc1ac53f7da7290da7bb66352)) +* Add remediation_token_generation_enabled to the CsmSettings ([1be4844](https://github.com/googleapis/python-iap/commit/1be48441b21823dbc1ac53f7da7290da7bb66352)) + +## [1.6.0](https://github.com/googleapis/python-iap/compare/v1.5.4...v1.6.0) (2022-12-14) + + +### Features + +* Add support for `google.cloud.iap.__version__` ([828e0bd](https://github.com/googleapis/python-iap/commit/828e0bdc68c1f1c0f8986dec6bd0d99b0bd8f561)) +* Add typing to proto.Message based class attributes ([828e0bd](https://github.com/googleapis/python-iap/commit/828e0bdc68c1f1c0f8986dec6bd0d99b0bd8f561)) + + +### Bug Fixes + +* Add dict typing for client_options ([828e0bd](https://github.com/googleapis/python-iap/commit/828e0bdc68c1f1c0f8986dec6bd0d99b0bd8f561)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 
([2c0f3e5](https://github.com/googleapis/python-iap/commit/2c0f3e5f044b956b02e1389741adeaa0ee2bc259)) +* Drop usage of pkg_resources ([2c0f3e5](https://github.com/googleapis/python-iap/commit/2c0f3e5f044b956b02e1389741adeaa0ee2bc259)) +* Fix timeout default values ([2c0f3e5](https://github.com/googleapis/python-iap/commit/2c0f3e5f044b956b02e1389741adeaa0ee2bc259)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([828e0bd](https://github.com/googleapis/python-iap/commit/828e0bdc68c1f1c0f8986dec6bd0d99b0bd8f561)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([2c0f3e5](https://github.com/googleapis/python-iap/commit/2c0f3e5f044b956b02e1389741adeaa0ee2bc259)) + +## [1.5.4](https://github.com/googleapis/python-iap/compare/v1.5.3...v1.5.4) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#137](https://github.com/googleapis/python-iap/issues/137)) ([c5fd70f](https://github.com/googleapis/python-iap/commit/c5fd70f7168a5a87a52afb25a399a6990ab2c9e0)) + +## [1.5.3](https://github.com/googleapis/python-iap/compare/v1.5.2...v1.5.3) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#135](https://github.com/googleapis/python-iap/issues/135)) ([8ba970c](https://github.com/googleapis/python-iap/commit/8ba970cd3e7395e3c52cdf57b0af813f5d68e162)) + +## [1.5.2](https://github.com/googleapis/python-iap/compare/v1.5.1...v1.5.2) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#120](https://github.com/googleapis/python-iap/issues/120)) ([e5e67d0](https://github.com/googleapis/python-iap/commit/e5e67d0df1f26b1a786e23b26028de1406458b3f)) +* **deps:** require proto-plus >= 1.22.0 ([e5e67d0](https://github.com/googleapis/python-iap/commit/e5e67d0df1f26b1a786e23b26028de1406458b3f)) + +## [1.5.1](https://github.com/googleapis/python-iap/compare/v1.5.0...v1.5.1) (2022-07-13) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#114](https://github.com/googleapis/python-iap/issues/114)) ([ecf5c0e](https://github.com/googleapis/python-iap/commit/ecf5c0ef344578bc32474b1b6b13216faa2ebcbb)) + +## [1.5.0](https://github.com/googleapis/python-iap/compare/v1.4.1...v1.5.0) (2022-07-12) + + +### Features + +* add audience parameter ([01dc926](https://github.com/googleapis/python-iap/commit/01dc926fe83c7e774aefa7d5b859aa0c8a606e8e)) + + +### Bug Fixes + +* **deps:** require google-api-core >= 2.8.0 ([#110](https://github.com/googleapis/python-iap/issues/110)) ([01dc926](https://github.com/googleapis/python-iap/commit/01dc926fe83c7e774aefa7d5b859aa0c8a606e8e)) +* require python 3.7+ ([#112](https://github.com/googleapis/python-iap/issues/112)) ([7230d5f](https://github.com/googleapis/python-iap/commit/7230d5f2d9b15c970d56305a823886858b3d23c9)) + +## [1.4.1](https://github.com/googleapis/python-iap/compare/v1.4.0...v1.4.1) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#103](https://github.com/googleapis/python-iap/issues/103)) ([c8b9cc5](https://github.com/googleapis/python-iap/commit/c8b9cc54d3734f62fd5bfaeb8f7f107427f3bae3)) + + +### Documentation + +* fix changelog header to consistent size ([#102](https://github.com/googleapis/python-iap/issues/102)) ([7c7a470](https://github.com/googleapis/python-iap/commit/7c7a4705af42124a616a193ebc4a665bfa9e69be)) + +## [1.4.0](https://github.com/googleapis/python-iap/compare/v1.3.1...v1.4.0) (2022-05-19) + + +### Features + +* add ReauthSettings to the UpdateIapSettingsRequest 
([36c1866](https://github.com/googleapis/python-iap/commit/36c1866e605e4e880e65eb44a7d4dc49389f92f3)) +* add the TunnelDestGroup-related methods and types ([#93](https://github.com/googleapis/python-iap/issues/93)) ([36c1866](https://github.com/googleapis/python-iap/commit/36c1866e605e4e880e65eb44a7d4dc49389f92f3)) +* AuditConfig for IAM v1 ([49dc9c7](https://github.com/googleapis/python-iap/commit/49dc9c7162e956c684892bdf866f20b47e3b27d2)) + + +### Bug Fixes + +* **deps:** require grpc-google-iam-v1 >=0.12.4 ([49dc9c7](https://github.com/googleapis/python-iap/commit/49dc9c7162e956c684892bdf866f20b47e3b27d2)) + +## [1.3.1](https://github.com/googleapis/python-iap/compare/v1.3.0...v1.3.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#71](https://github.com/googleapis/python-iap/issues/71)) ([0e00d6a](https://github.com/googleapis/python-iap/commit/0e00d6a489ee97707bb733387a951e48e1f415dc)) + +## [1.3.0](https://github.com/googleapis/python-iap/compare/v1.2.1...v1.3.0) (2022-02-26) + + +### Features + +* add api key support ([#57](https://github.com/googleapis/python-iap/issues/57)) ([1787cb0](https://github.com/googleapis/python-iap/commit/1787cb00158f111915d0f9bb948abeceab75a6f4)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([18f725d](https://github.com/googleapis/python-iap/commit/18f725d81007e02d2054538087d7567029b2d179)) + + +### Documentation + +* add generated snippets ([#62](https://github.com/googleapis/python-iap/issues/62)) ([27c14d2](https://github.com/googleapis/python-iap/commit/27c14d24ce50d6bb80f09218bcf0ebc1db9ceabd)) + +## [1.2.1](https://www.github.com/googleapis/python-iap/compare/v1.2.0...v1.2.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([30b353c](https://www.github.com/googleapis/python-iap/commit/30b353c9296d37f8792759a5ee997b3f50572f19)) +* **deps:** require google-api-core >= 1.28.0 ([30b353c](https://www.github.com/googleapis/python-iap/commit/30b353c9296d37f8792759a5ee997b3f50572f19)) + + +### Documentation + +* list oneofs in docstring ([30b353c](https://www.github.com/googleapis/python-iap/commit/30b353c9296d37f8792759a5ee997b3f50572f19)) + +## [1.2.0](https://www.github.com/googleapis/python-iap/compare/v1.1.0...v1.2.0) (2021-10-18) + + +### Features + +* add support for python 3.10 ([#37](https://www.github.com/googleapis/python-iap/issues/37)) ([7716426](https://www.github.com/googleapis/python-iap/commit/7716426b83a457a3206fae1dee66c46cf35bd7e7)) + +## [1.1.0](https://www.github.com/googleapis/python-iap/compare/v1.0.2...v1.1.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#34](https://www.github.com/googleapis/python-iap/issues/34)) ([a985591](https://www.github.com/googleapis/python-iap/commit/a985591b016e768a5a172ab5f8de873319b1e7e0)) + +## [1.0.2](https://www.github.com/googleapis/python-iap/compare/v1.0.1...v1.0.2) (2021-10-05) + + +### Bug Fixes + +* improper types in pagers generation ([242d445](https://www.github.com/googleapis/python-iap/commit/242d44516fe55141a26024653158ea94fa93e525)) + +## [1.0.1](https://www.github.com/googleapis/python-iap/compare/v1.0.0...v1.0.1) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([a5b27d2](https://www.github.com/googleapis/python-iap/commit/a5b27d26e4bc845aeede7281959a81f693ee52c2)) + +## [1.0.0](https://www.github.com/googleapis/python-iap/compare/v0.1.2...v1.0.0) (2021-08-09) + + +### Features + +* bump release level to 
production/stable ([#13](https://www.github.com/googleapis/python-iap/issues/13)) ([9d0a9f8](https://www.github.com/googleapis/python-iap/commit/9d0a9f84554b98fd2b1829532c9c13b16432b0af)) + +## [0.1.2](https://www.github.com/googleapis/python-iap/compare/v0.1.1...v0.1.2) (2021-07-29) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#9](https://www.github.com/googleapis/python-iap/issues/9)) ([51304a3](https://www.github.com/googleapis/python-iap/commit/51304a327207a233e40308a8b49c9fdeda87c28b)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#5](https://www.github.com/googleapis/python-iap/issues/5)) ([2ba31c5](https://www.github.com/googleapis/python-iap/commit/2ba31c5a2ea2e52c4a79410548a252bf8fc0522e)) + + +### Miscellaneous Chores + +* release as 0.1.2 ([#10](https://www.github.com/googleapis/python-iap/issues/10)) ([4499ba5](https://www.github.com/googleapis/python-iap/commit/4499ba5ccd90edc882fbda73e4d792074ff44e6d)) + +## [0.1.1](https://www.github.com/googleapis/python-iap/compare/v0.1.0...v0.1.1) (2021-07-21) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#4](https://www.github.com/googleapis/python-iap/issues/4)) ([21e9e57](https://www.github.com/googleapis/python-iap/commit/21e9e57451a87d9f9dd1137142d138cb73aa746c)) + +## 0.1.0 (2021-07-06) + + +### Features + +* generate v1 ([6fdf055](https://www.github.com/googleapis/python-iap/commit/6fdf055c835adf6715bf43e9255d02abcd2affd4)) diff --git a/packages/google-cloud-iap/CODE_OF_CONDUCT.md b/packages/google-cloud-iap/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-iap/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-iap/CONTRIBUTING.rst b/packages/google-cloud-iap/CONTRIBUTING.rst new file mode 100644 index 000000000000..bdba346097ac --- /dev/null +++ b/packages/google-cloud-iap/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. 
Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. 
+ If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system- -- -k + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-iap + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-iap/LICENSE b/packages/google-cloud-iap/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-iap/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-iap/MANIFEST.in b/packages/google-cloud-iap/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-iap/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-iap/README.rst b/packages/google-cloud-iap/README.rst new file mode 100644 index 000000000000..9e5711bf31e3 --- /dev/null +++ b/packages/google-cloud-iap/README.rst @@ -0,0 +1,107 @@ +Python Client for Identity-Aware Proxy +====================================== + +|stable| |pypi| |versions| + +`Identity-Aware Proxy`_: Identity-Aware Proxy includes a number of features that can be used to protect access to Google Cloud hosted resources and applications hosted on Google Cloud. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-iap.svg + :target: https://pypi.org/project/google-cloud-iap/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-iap.svg + :target: https://pypi.org/project/google-cloud-iap/ +.. _Identity-Aware Proxy: https://cloud.google.com/iap +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/iap/latest +.. _Product Documentation: https://cloud.google.com/iap + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Identity-Aware Proxy.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Identity-Aware Proxy.: https://cloud.google.com/iap +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-iap
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-iap
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Identity-Aware Proxy
+  to see other available methods on the client.
+- Read the `Identity-Aware Proxy Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Identity-Aware Proxy Product documentation: https://cloud.google.com/iap
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-iap/SECURITY.md b/packages/google-cloud-iap/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-iap/SECURITY.md @@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
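+
+As a minimal sketch tying together the README's Quick Start and Next Steps
+above (everything here is illustrative: the settings resource name is a
+placeholder, and the valid formats are listed in the Client Library
+Documentation):
+
+.. code-block:: python
+
+    from google.cloud import iap_v1
+
+    # Credentials are discovered from the environment; see the
+    # "Setup Authentication" step in the README above.
+    client = iap_v1.IdentityAwareProxyAdminServiceClient()
+
+    # Fetch the IAP settings for a resource. The name below is a placeholder.
+    request = iap_v1.GetIapSettingsRequest(name="projects/PROJECT_NUMBER/iap_web")
+    settings = client.get_iap_settings(request=request)
+    print(settings)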
diff --git a/packages/google-cloud-iap/docs/CHANGELOG.md b/packages/google-cloud-iap/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-iap/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-iap/docs/README.rst b/packages/google-cloud-iap/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-iap/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-iap/docs/_static/custom.css b/packages/google-cloud-iap/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-iap/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-iap/docs/_templates/layout.html b/packages/google-cloud-iap/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-iap/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+        {%- block relbar_top %}
+          {%- if theme_show_relbar_top|tobool %}
+            <div class="related top">&#160;{{- rellink_markup () }}</div>
+          {%- endif %}
+        {% endblock %}
+
+        <div class="body" role="main">
+          <div class="admonition" id="python2-eol">
+          As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+          Library versions released prior to that date will continue to be available. For more information please
+          visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          </div>
+          {% block body %} {% endblock %}
+        </div>
+
+        {%- block relbar_bottom %}
+          {%- if theme_show_relbar_bottom|tobool %}
+            <div class="related bottom">&#160;{{- rellink_markup () }}</div>
+          {%- endif %}
+        {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-iap/docs/conf.py b/packages/google-cloud-iap/docs/conf.py new file mode 100644 index 000000000000..78c73ca78b88 --- /dev/null +++ b/packages/google-cloud-iap/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-iap documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-iap" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-iap", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-iap-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warning, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
+    # Latex figure (float) alignment
+    #'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (
+        root_doc,
+        "google-cloud-iap.tex",
+        "google-cloud-iap Documentation",
+        author,
+        "manual",
+    )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-iap", + "google-cloud-iap Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-iap", + "google-cloud-iap Documentation", + author, + "google-cloud-iap", + "google-cloud-iap Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-iap/docs/iap_v1/identity_aware_proxy_admin_service.rst b/packages/google-cloud-iap/docs/iap_v1/identity_aware_proxy_admin_service.rst new file mode 100644 index 000000000000..3098cf5d3834 --- /dev/null +++ b/packages/google-cloud-iap/docs/iap_v1/identity_aware_proxy_admin_service.rst @@ -0,0 +1,10 @@ +IdentityAwareProxyAdminService +------------------------------------------------ + +.. automodule:: google.cloud.iap_v1.services.identity_aware_proxy_admin_service + :members: + :inherited-members: + +.. automodule:: google.cloud.iap_v1.services.identity_aware_proxy_admin_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-iap/docs/iap_v1/identity_aware_proxy_o_auth_service.rst b/packages/google-cloud-iap/docs/iap_v1/identity_aware_proxy_o_auth_service.rst new file mode 100644 index 000000000000..172d1e671bee --- /dev/null +++ b/packages/google-cloud-iap/docs/iap_v1/identity_aware_proxy_o_auth_service.rst @@ -0,0 +1,10 @@ +IdentityAwareProxyOAuthService +------------------------------------------------ + +.. automodule:: google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-iap/docs/iap_v1/services.rst b/packages/google-cloud-iap/docs/iap_v1/services.rst new file mode 100644 index 000000000000..7f5aebfa27f7 --- /dev/null +++ b/packages/google-cloud-iap/docs/iap_v1/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Iap v1 API +==================================== +.. toctree:: + :maxdepth: 2 + + identity_aware_proxy_admin_service + identity_aware_proxy_o_auth_service diff --git a/packages/google-cloud-iap/docs/iap_v1/types.rst b/packages/google-cloud-iap/docs/iap_v1/types.rst new file mode 100644 index 000000000000..711d3b91bff3 --- /dev/null +++ b/packages/google-cloud-iap/docs/iap_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Iap v1 API +================================= + +.. automodule:: google.cloud.iap_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-iap/docs/index.rst b/packages/google-cloud-iap/docs/index.rst new file mode 100644 index 000000000000..29ceab9eb6eb --- /dev/null +++ b/packages/google-cloud-iap/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + iap_v1/services + iap_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-iap`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-iap/docs/multiprocessing.rst b/packages/google-cloud-iap/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-iap/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-iap/google/cloud/iap/__init__.py b/packages/google-cloud-iap/google/cloud/iap/__init__.py new file mode 100644 index 000000000000..a7d4e1969af6 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap/__init__.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.iap import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.iap_v1.services.identity_aware_proxy_admin_service.async_client import ( + IdentityAwareProxyAdminServiceAsyncClient, +) +from google.cloud.iap_v1.services.identity_aware_proxy_admin_service.client import ( + IdentityAwareProxyAdminServiceClient, +) +from google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.async_client import ( + IdentityAwareProxyOAuthServiceAsyncClient, +) +from google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.client import ( + IdentityAwareProxyOAuthServiceClient, +) +from google.cloud.iap_v1.types.service import ( + AccessDeniedPageSettings, + AccessSettings, + AllowedDomainsSettings, + ApplicationSettings, + AttributePropagationSettings, + Brand, + CorsSettings, + CreateBrandRequest, + CreateIdentityAwareProxyClientRequest, + CreateTunnelDestGroupRequest, + CsmSettings, + DeleteIdentityAwareProxyClientRequest, + DeleteTunnelDestGroupRequest, + GcipSettings, + GetBrandRequest, + GetIapSettingsRequest, + GetIdentityAwareProxyClientRequest, + GetTunnelDestGroupRequest, + IapSettings, + IdentityAwareProxyClient, + ListBrandsRequest, + ListBrandsResponse, + ListIdentityAwareProxyClientsRequest, + ListIdentityAwareProxyClientsResponse, + ListTunnelDestGroupsRequest, + ListTunnelDestGroupsResponse, + OAuthSettings, + ReauthSettings, + ResetIdentityAwareProxyClientSecretRequest, + TunnelDestGroup, + UpdateIapSettingsRequest, + UpdateTunnelDestGroupRequest, +) + +__all__ = ( + "IdentityAwareProxyAdminServiceClient", + "IdentityAwareProxyAdminServiceAsyncClient", + "IdentityAwareProxyOAuthServiceClient", + "IdentityAwareProxyOAuthServiceAsyncClient", + "AccessDeniedPageSettings", + "AccessSettings", + "AllowedDomainsSettings", + "ApplicationSettings", + "AttributePropagationSettings", + "Brand", + "CorsSettings", + "CreateBrandRequest", + "CreateIdentityAwareProxyClientRequest", + "CreateTunnelDestGroupRequest", + "CsmSettings", + "DeleteIdentityAwareProxyClientRequest", + "DeleteTunnelDestGroupRequest", + "GcipSettings", + "GetBrandRequest", + "GetIapSettingsRequest", + "GetIdentityAwareProxyClientRequest", + "GetTunnelDestGroupRequest", + "IapSettings", + "IdentityAwareProxyClient", + "ListBrandsRequest", + "ListBrandsResponse", + "ListIdentityAwareProxyClientsRequest", + "ListIdentityAwareProxyClientsResponse", + "ListTunnelDestGroupsRequest", + "ListTunnelDestGroupsResponse", + "OAuthSettings", + "ReauthSettings", + "ResetIdentityAwareProxyClientSecretRequest", + "TunnelDestGroup", + "UpdateIapSettingsRequest", + "UpdateTunnelDestGroupRequest", +) diff --git a/packages/google-cloud-iap/google/cloud/iap/gapic_version.py b/packages/google-cloud-iap/google/cloud/iap/gapic_version.py new file mode 100644 index 000000000000..6f9b28f95196 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-iap/google/cloud/iap/py.typed b/packages/google-cloud-iap/google/cloud/iap/py.typed new file mode 100644 index 000000000000..90095aa69189 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-iap package uses inline types. diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/__init__.py b/packages/google-cloud-iap/google/cloud/iap_v1/__init__.py new file mode 100644 index 000000000000..c8ca40362dd2 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/__init__.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.iap_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.identity_aware_proxy_admin_service import ( + IdentityAwareProxyAdminServiceAsyncClient, + IdentityAwareProxyAdminServiceClient, +) +from .services.identity_aware_proxy_o_auth_service import ( + IdentityAwareProxyOAuthServiceAsyncClient, + IdentityAwareProxyOAuthServiceClient, +) +from .types.service import ( + AccessDeniedPageSettings, + AccessSettings, + AllowedDomainsSettings, + ApplicationSettings, + AttributePropagationSettings, + Brand, + CorsSettings, + CreateBrandRequest, + CreateIdentityAwareProxyClientRequest, + CreateTunnelDestGroupRequest, + CsmSettings, + DeleteIdentityAwareProxyClientRequest, + DeleteTunnelDestGroupRequest, + GcipSettings, + GetBrandRequest, + GetIapSettingsRequest, + GetIdentityAwareProxyClientRequest, + GetTunnelDestGroupRequest, + IapSettings, + IdentityAwareProxyClient, + ListBrandsRequest, + ListBrandsResponse, + ListIdentityAwareProxyClientsRequest, + ListIdentityAwareProxyClientsResponse, + ListTunnelDestGroupsRequest, + ListTunnelDestGroupsResponse, + OAuthSettings, + ReauthSettings, + ResetIdentityAwareProxyClientSecretRequest, + TunnelDestGroup, + UpdateIapSettingsRequest, + UpdateTunnelDestGroupRequest, +) + +__all__ = ( + "IdentityAwareProxyAdminServiceAsyncClient", + "IdentityAwareProxyOAuthServiceAsyncClient", + "AccessDeniedPageSettings", + "AccessSettings", + "AllowedDomainsSettings", + "ApplicationSettings", + "AttributePropagationSettings", + "Brand", + "CorsSettings", + "CreateBrandRequest", + "CreateIdentityAwareProxyClientRequest", + "CreateTunnelDestGroupRequest", + "CsmSettings", + "DeleteIdentityAwareProxyClientRequest", + "DeleteTunnelDestGroupRequest", + "GcipSettings", + "GetBrandRequest", + "GetIapSettingsRequest", + "GetIdentityAwareProxyClientRequest", + "GetTunnelDestGroupRequest", + "IapSettings", + "IdentityAwareProxyAdminServiceClient", + "IdentityAwareProxyClient", + "IdentityAwareProxyOAuthServiceClient", + "ListBrandsRequest", + "ListBrandsResponse", + "ListIdentityAwareProxyClientsRequest", + 
"ListIdentityAwareProxyClientsResponse", + "ListTunnelDestGroupsRequest", + "ListTunnelDestGroupsResponse", + "OAuthSettings", + "ReauthSettings", + "ResetIdentityAwareProxyClientSecretRequest", + "TunnelDestGroup", + "UpdateIapSettingsRequest", + "UpdateTunnelDestGroupRequest", +) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/gapic_metadata.json b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_metadata.json new file mode 100644 index 000000000000..98e95fde4736 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_metadata.json @@ -0,0 +1,317 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.iap_v1", + "protoPackage": "google.cloud.iap.v1", + "schema": "1.0", + "services": { + "IdentityAwareProxyAdminService": { + "clients": { + "grpc": { + "libraryClient": "IdentityAwareProxyAdminServiceClient", + "rpcs": { + "CreateTunnelDestGroup": { + "methods": [ + "create_tunnel_dest_group" + ] + }, + "DeleteTunnelDestGroup": { + "methods": [ + "delete_tunnel_dest_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetIapSettings": { + "methods": [ + "get_iap_settings" + ] + }, + "GetTunnelDestGroup": { + "methods": [ + "get_tunnel_dest_group" + ] + }, + "ListTunnelDestGroups": { + "methods": [ + "list_tunnel_dest_groups" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateIapSettings": { + "methods": [ + "update_iap_settings" + ] + }, + "UpdateTunnelDestGroup": { + "methods": [ + "update_tunnel_dest_group" + ] + } + } + }, + "grpc-async": { + "libraryClient": "IdentityAwareProxyAdminServiceAsyncClient", + "rpcs": { + "CreateTunnelDestGroup": { + "methods": [ + "create_tunnel_dest_group" + ] + }, + "DeleteTunnelDestGroup": { + "methods": [ + "delete_tunnel_dest_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetIapSettings": { + "methods": [ + "get_iap_settings" + ] + }, + "GetTunnelDestGroup": { + "methods": [ + "get_tunnel_dest_group" + ] + }, + "ListTunnelDestGroups": { + "methods": [ + "list_tunnel_dest_groups" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateIapSettings": { + "methods": [ + "update_iap_settings" + ] + }, + "UpdateTunnelDestGroup": { + "methods": [ + "update_tunnel_dest_group" + ] + } + } + }, + "rest": { + "libraryClient": "IdentityAwareProxyAdminServiceClient", + "rpcs": { + "CreateTunnelDestGroup": { + "methods": [ + "create_tunnel_dest_group" + ] + }, + "DeleteTunnelDestGroup": { + "methods": [ + "delete_tunnel_dest_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetIapSettings": { + "methods": [ + "get_iap_settings" + ] + }, + "GetTunnelDestGroup": { + "methods": [ + "get_tunnel_dest_group" + ] + }, + "ListTunnelDestGroups": { + "methods": [ + "list_tunnel_dest_groups" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateIapSettings": { + "methods": [ + "update_iap_settings" + ] + }, + "UpdateTunnelDestGroup": { + "methods": [ + "update_tunnel_dest_group" + ] + } + } + } + } + }, + "IdentityAwareProxyOAuthService": { + "clients": { + "grpc": { + "libraryClient": "IdentityAwareProxyOAuthServiceClient", + "rpcs": { + 
"CreateBrand": { + "methods": [ + "create_brand" + ] + }, + "CreateIdentityAwareProxyClient": { + "methods": [ + "create_identity_aware_proxy_client" + ] + }, + "DeleteIdentityAwareProxyClient": { + "methods": [ + "delete_identity_aware_proxy_client" + ] + }, + "GetBrand": { + "methods": [ + "get_brand" + ] + }, + "GetIdentityAwareProxyClient": { + "methods": [ + "get_identity_aware_proxy_client" + ] + }, + "ListBrands": { + "methods": [ + "list_brands" + ] + }, + "ListIdentityAwareProxyClients": { + "methods": [ + "list_identity_aware_proxy_clients" + ] + }, + "ResetIdentityAwareProxyClientSecret": { + "methods": [ + "reset_identity_aware_proxy_client_secret" + ] + } + } + }, + "grpc-async": { + "libraryClient": "IdentityAwareProxyOAuthServiceAsyncClient", + "rpcs": { + "CreateBrand": { + "methods": [ + "create_brand" + ] + }, + "CreateIdentityAwareProxyClient": { + "methods": [ + "create_identity_aware_proxy_client" + ] + }, + "DeleteIdentityAwareProxyClient": { + "methods": [ + "delete_identity_aware_proxy_client" + ] + }, + "GetBrand": { + "methods": [ + "get_brand" + ] + }, + "GetIdentityAwareProxyClient": { + "methods": [ + "get_identity_aware_proxy_client" + ] + }, + "ListBrands": { + "methods": [ + "list_brands" + ] + }, + "ListIdentityAwareProxyClients": { + "methods": [ + "list_identity_aware_proxy_clients" + ] + }, + "ResetIdentityAwareProxyClientSecret": { + "methods": [ + "reset_identity_aware_proxy_client_secret" + ] + } + } + }, + "rest": { + "libraryClient": "IdentityAwareProxyOAuthServiceClient", + "rpcs": { + "CreateBrand": { + "methods": [ + "create_brand" + ] + }, + "CreateIdentityAwareProxyClient": { + "methods": [ + "create_identity_aware_proxy_client" + ] + }, + "DeleteIdentityAwareProxyClient": { + "methods": [ + "delete_identity_aware_proxy_client" + ] + }, + "GetBrand": { + "methods": [ + "get_brand" + ] + }, + "GetIdentityAwareProxyClient": { + "methods": [ + "get_identity_aware_proxy_client" + ] + }, + "ListBrands": { + "methods": [ + "list_brands" + ] + }, + "ListIdentityAwareProxyClients": { + "methods": [ + "list_identity_aware_proxy_clients" + ] + }, + "ResetIdentityAwareProxyClientSecret": { + "methods": [ + "reset_identity_aware_proxy_client_secret" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py new file mode 100644 index 000000000000..6f9b28f95196 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/py.typed b/packages/google-cloud-iap/google/cloud/iap_v1/py.typed new file mode 100644 index 000000000000..90095aa69189 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. 
+# The google-cloud-iap package uses inline types. diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/__init__.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/__init__.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/__init__.py new file mode 100644 index 000000000000..01c544c36d7e --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import IdentityAwareProxyAdminServiceAsyncClient +from .client import IdentityAwareProxyAdminServiceClient + +__all__ = ( + "IdentityAwareProxyAdminServiceClient", + "IdentityAwareProxyAdminServiceAsyncClient", +) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/async_client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/async_client.py new file mode 100644 index 000000000000..cf81cea66722 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/async_client.py @@ -0,0 +1,1343 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.iap_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.iap_v1.services.identity_aware_proxy_admin_service import pagers +from google.cloud.iap_v1.types import service + +from .client import IdentityAwareProxyAdminServiceClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + IdentityAwareProxyAdminServiceTransport, +) +from .transports.grpc_asyncio import IdentityAwareProxyAdminServiceGrpcAsyncIOTransport + + +class IdentityAwareProxyAdminServiceAsyncClient: + """APIs for Identity-Aware Proxy Admin configurations.""" + + _client: IdentityAwareProxyAdminServiceClient + + DEFAULT_ENDPOINT = IdentityAwareProxyAdminServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = IdentityAwareProxyAdminServiceClient.DEFAULT_MTLS_ENDPOINT + + tunnel_dest_group_path = staticmethod( + IdentityAwareProxyAdminServiceClient.tunnel_dest_group_path + ) + parse_tunnel_dest_group_path = staticmethod( + IdentityAwareProxyAdminServiceClient.parse_tunnel_dest_group_path + ) + tunnel_location_path = staticmethod( + IdentityAwareProxyAdminServiceClient.tunnel_location_path + ) + parse_tunnel_location_path = staticmethod( + IdentityAwareProxyAdminServiceClient.parse_tunnel_location_path + ) + common_billing_account_path = staticmethod( + IdentityAwareProxyAdminServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + IdentityAwareProxyAdminServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + IdentityAwareProxyAdminServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + IdentityAwareProxyAdminServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + IdentityAwareProxyAdminServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + IdentityAwareProxyAdminServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + IdentityAwareProxyAdminServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + IdentityAwareProxyAdminServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + IdentityAwareProxyAdminServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + IdentityAwareProxyAdminServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + IdentityAwareProxyAdminServiceAsyncClient: The constructed client. + """ + return IdentityAwareProxyAdminServiceClient.from_service_account_info.__func__(IdentityAwareProxyAdminServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IdentityAwareProxyAdminServiceAsyncClient: The constructed client. + """ + return IdentityAwareProxyAdminServiceClient.from_service_account_file.__func__(IdentityAwareProxyAdminServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return IdentityAwareProxyAdminServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> IdentityAwareProxyAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + IdentityAwareProxyAdminServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(IdentityAwareProxyAdminServiceClient).get_transport_class, + type(IdentityAwareProxyAdminServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, IdentityAwareProxyAdminServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the identity aware proxy admin service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
+
+    @property
+    def transport(self) -> IdentityAwareProxyAdminServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            IdentityAwareProxyAdminServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(IdentityAwareProxyAdminServiceClient).get_transport_class,
+        type(IdentityAwareProxyAdminServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, IdentityAwareProxyAdminServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the identity aware proxy admin service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.IdentityAwareProxyAdminServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = IdentityAwareProxyAdminServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def set_iam_policy(
+        self,
+        request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the access control policy for an Identity-Aware Proxy
+        protected resource. Replaces any existing policy. More
+        information about managing access via IAP can be found at:
+        https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import iap_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            async def sample_set_iam_policy():
+                # Create a client
+                client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = await client.set_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]):
+                The request object. Request message for ``SetIamPolicy`` method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": [ "user:eve@example.com" ], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ], "etag": "BwWWja0YfJA=", "version": 3 + + } + + **YAML example:** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: + BwWWja0YfJA= version: 3 + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for an Identity-Aware Proxy + protected resource. 
More information about managing access via + IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": [ "user:eve@example.com" ], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ], "etag": "BwWWja0YfJA=", "version": 3 + + } + + **YAML example:** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: + BwWWja0YfJA= version: 3 + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on the Identity-Aware + Proxy protected resource. More information about managing access + via IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iap_settings( + self, + request: Optional[Union[service.GetIapSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IapSettings: + r"""Gets the IAP settings on a particular IAP protected + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_get_iap_settings(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.GetIapSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_iap_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.GetIapSettingsRequest, dict]]): + The request object. The request sent to GetIapSettings. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IapSettings: + The IAP configurable settings. + """ + # Create or coerce a protobuf request object. + request = service.GetIapSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iap_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_iap_settings( + self, + request: Optional[Union[service.UpdateIapSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IapSettings: + r"""Updates the IAP settings on a particular IAP protected resource. + It replaces all fields unless the ``update_mask`` is set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_update_iap_settings(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + iap_settings = iap_v1.IapSettings() + iap_settings.name = "name_value" + + request = iap_v1.UpdateIapSettingsRequest( + iap_settings=iap_settings, + ) + + # Make the request + response = await client.update_iap_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.UpdateIapSettingsRequest, dict]]): + The request object. The request sent to + UpdateIapSettings. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IapSettings: + The IAP configurable settings. + """ + # Create or coerce a protobuf request object. + request = service.UpdateIapSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_iap_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("iap_settings.name", request.iap_settings.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tunnel_dest_groups( + self, + request: Optional[Union[service.ListTunnelDestGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTunnelDestGroupsAsyncPager: + r"""Lists the existing TunnelDestGroups. To group across all + locations, use a ``-`` as the location ID. For example: + ``/v1/projects/123/iap_tunnel/locations/-/destGroups`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_list_tunnel_dest_groups(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.ListTunnelDestGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tunnel_dest_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.ListTunnelDestGroupsRequest, dict]]): + The request object. The request to ListTunnelDestGroups. + parent (:class:`str`): + Required. Google Cloud Project ID and location. In the + following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}``. + A ``-`` can be used for the location to group across all + locations. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.services.identity_aware_proxy_admin_service.pagers.ListTunnelDestGroupsAsyncPager: + The response from + ListTunnelDestGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListTunnelDestGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tunnel_dest_groups, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
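+        # (Illustrative note: gapic_v1.routing_header.to_grpc_metadata folds
+        # the (key, value) pairs into a single "x-goog-request-params" header,
+        # e.g. "parent=projects/123/iap_tunnel/locations/-" for a hypothetical
+        # parent value, which the backend uses to route the request.)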
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTunnelDestGroupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_tunnel_dest_group( + self, + request: Optional[Union[service.CreateTunnelDestGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + tunnel_dest_group: Optional[service.TunnelDestGroup] = None, + tunnel_dest_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.TunnelDestGroup: + r"""Creates a new TunnelDestGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_create_tunnel_dest_group(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + tunnel_dest_group = iap_v1.TunnelDestGroup() + tunnel_dest_group.name = "name_value" + + request = iap_v1.CreateTunnelDestGroupRequest( + parent="parent_value", + tunnel_dest_group=tunnel_dest_group, + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + + # Make the request + response = await client.create_tunnel_dest_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.CreateTunnelDestGroupRequest, dict]]): + The request object. The request to CreateTunnelDestGroup. + parent (:class:`str`): + Required. Google Cloud Project ID and location. In the + following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tunnel_dest_group (:class:`google.cloud.iap_v1.types.TunnelDestGroup`): + Required. The TunnelDestGroup to + create. + + This corresponds to the ``tunnel_dest_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tunnel_dest_group_id (:class:`str`): + Required. The ID to use for the TunnelDestGroup, which + becomes the final component of the resource name. + + This value must be 4-63 characters, and valid characters + are ``[a-z]-``. + + This corresponds to the ``tunnel_dest_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.TunnelDestGroup: + A TunnelDestGroup. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tunnel_dest_group, tunnel_dest_group_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateTunnelDestGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tunnel_dest_group is not None: + request.tunnel_dest_group = tunnel_dest_group + if tunnel_dest_group_id is not None: + request.tunnel_dest_group_id = tunnel_dest_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tunnel_dest_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_tunnel_dest_group( + self, + request: Optional[Union[service.GetTunnelDestGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.TunnelDestGroup: + r"""Retrieves an existing TunnelDestGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_get_tunnel_dest_group(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.GetTunnelDestGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tunnel_dest_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.GetTunnelDestGroupRequest, dict]]): + The request object. The request to GetTunnelDestGroup. + name (:class:`str`): + Required. Name of the TunnelDestGroup to be fetched. In + the following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.TunnelDestGroup: + A TunnelDestGroup. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetTunnelDestGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_tunnel_dest_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tunnel_dest_group( + self, + request: Optional[Union[service.DeleteTunnelDestGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a TunnelDestGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_delete_tunnel_dest_group(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.DeleteTunnelDestGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_tunnel_dest_group(request=request) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.DeleteTunnelDestGroupRequest, dict]]): + The request object. The request to DeleteTunnelDestGroup. + name (:class:`str`): + Required. Name of the TunnelDestGroup to delete. In the + following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.DeleteTunnelDestGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_tunnel_dest_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_tunnel_dest_group( + self, + request: Optional[Union[service.UpdateTunnelDestGroupRequest, dict]] = None, + *, + tunnel_dest_group: Optional[service.TunnelDestGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.TunnelDestGroup: + r"""Updates a TunnelDestGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_update_tunnel_dest_group(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceAsyncClient() + + # Initialize request argument(s) + tunnel_dest_group = iap_v1.TunnelDestGroup() + tunnel_dest_group.name = "name_value" + + request = iap_v1.UpdateTunnelDestGroupRequest( + tunnel_dest_group=tunnel_dest_group, + ) + + # Make the request + response = await client.update_tunnel_dest_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.UpdateTunnelDestGroupRequest, dict]]): + The request object. The request to UpdateTunnelDestGroup. + tunnel_dest_group (:class:`google.cloud.iap_v1.types.TunnelDestGroup`): + Required. The new values for the + TunnelDestGroup. + + This corresponds to the ``tunnel_dest_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A field mask that specifies which IAP + settings to update. If omitted, then all + of the settings are updated. See + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.TunnelDestGroup: + A TunnelDestGroup. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([tunnel_dest_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateTunnelDestGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tunnel_dest_group is not None: + request.tunnel_dest_group = tunnel_dest_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_tunnel_dest_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tunnel_dest_group.name", request.tunnel_dest_group.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IdentityAwareProxyAdminServiceAsyncClient",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py new file mode 100644 index 000000000000..48dcae8c2b12 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py @@ -0,0 +1,1584 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.iap_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.iap_v1.services.identity_aware_proxy_admin_service import pagers +from google.cloud.iap_v1.types import service + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + IdentityAwareProxyAdminServiceTransport, +) +from .transports.grpc import IdentityAwareProxyAdminServiceGrpcTransport +from .transports.grpc_asyncio import IdentityAwareProxyAdminServiceGrpcAsyncIOTransport +from .transports.rest import IdentityAwareProxyAdminServiceRestTransport + + +class IdentityAwareProxyAdminServiceClientMeta(type): + """Metaclass for the IdentityAwareProxyAdminService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[IdentityAwareProxyAdminServiceTransport]] + _transport_registry["grpc"] = IdentityAwareProxyAdminServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = IdentityAwareProxyAdminServiceGrpcAsyncIOTransport + _transport_registry["rest"] = IdentityAwareProxyAdminServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[IdentityAwareProxyAdminServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class IdentityAwareProxyAdminServiceClient( + metaclass=IdentityAwareProxyAdminServiceClientMeta +): + """APIs for Identity-Aware Proxy Admin configurations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "iap.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            IdentityAwareProxyAdminServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            IdentityAwareProxyAdminServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
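+    # Usage sketch (illustrative only; "sa-key.json" is a hypothetical path):
+    #
+    #   client = IdentityAwareProxyAdminServiceClient.from_service_account_file(
+    #       "sa-key.json"
+    #   )
+    #   # from_service_account_json("sa-key.json") is an equivalent alias.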
+
+    @property
+    def transport(self) -> IdentityAwareProxyAdminServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            IdentityAwareProxyAdminServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def tunnel_dest_group_path(
+        project: str,
+        location: str,
+        dest_group: str,
+    ) -> str:
+        """Returns a fully-qualified tunnel_dest_group string."""
+        return "projects/{project}/iap_tunnel/locations/{location}/destGroups/{dest_group}".format(
+            project=project,
+            location=location,
+            dest_group=dest_group,
+        )
+
+    @staticmethod
+    def parse_tunnel_dest_group_path(path: str) -> Dict[str, str]:
+        """Parses a tunnel_dest_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/iap_tunnel/locations/(?P<location>.+?)/destGroups/(?P<dest_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tunnel_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified tunnel_location string."""
+        return "projects/{project}/iap_tunnel/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_tunnel_location_path(path: str) -> Dict[str, str]:
+        """Parses a tunnel_location path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/iap_tunnel/locations/(?P<location>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
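+    # Round-trip sketch (values are made up): each *_path helper formats a
+    # resource name and the matching parse_* helper inverts it.
+    #
+    #   path = IdentityAwareProxyAdminServiceClient.tunnel_dest_group_path(
+    #       "my-project", "us-central1", "my-group"
+    #   )
+    #   # path == "projects/my-project/iap_tunnel/locations/us-central1/destGroups/my-group"
+    #   assert IdentityAwareProxyAdminServiceClient.parse_tunnel_dest_group_path(path) == {
+    #       "project": "my-project",
+    #       "location": "us-central1",
+    #       "dest_group": "my-group",
+    #   }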
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, IdentityAwareProxyAdminServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the identity aware proxy admin service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, IdentityAwareProxyAdminServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client.
It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, IdentityAwareProxyAdminServiceTransport): + # transport is a IdentityAwareProxyAdminServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for an Identity-Aware Proxy + protected resource. Replaces any existing policy. More + information about managing access via IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+ **JSON example:**
+
+ ::
+
+     {
+       "bindings": [
+         {
+           "role": "roles/resourcemanager.organizationAdmin",
+           "members": [
+             "user:mike@example.com",
+             "group:admins@example.com",
+             "domain:google.com",
+             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+           ]
+         },
+         {
+           "role": "roles/resourcemanager.organizationViewer",
+           "members": ["user:eve@example.com"],
+           "condition": {
+             "title": "expirable access",
+             "description": "Does not grant access after Sep 2020",
+             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+           }
+         }
+       ],
+       "etag": "BwWWja0YfJA=",
+       "version": 3
+     }
+
+ **YAML example:**
+
+ ::
+
+     bindings:
+     - members:
+       - user:mike@example.com
+       - group:admins@example.com
+       - domain:google.com
+       - serviceAccount:my-project-id@appspot.gserviceaccount.com
+       role: roles/resourcemanager.organizationAdmin
+     - members:
+       - user:eve@example.com
+       role: roles/resourcemanager.organizationViewer
+       condition:
+         title: expirable access
+         description: Does not grant access after Sep 2020
+         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+     etag: BwWWja0YfJA=
+     version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](\ https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ if isinstance(request, dict):
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+ elif not request:
+ # Null request, just make one.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_iam_policy(
+ self,
+ request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the access control policy for an Identity-Aware Proxy
+ protected resource. More information about managing access via
+ IAP can be found at:
+ https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import iap_v1
+ from google.iam.v1 import iam_policy_pb2 # type: ignore
+
+ def sample_get_iam_policy():
+ # Create a client
+ client = iap_v1.IdentityAwareProxyAdminServiceClient()
+
+ # Initialize request argument(s)
+ request = iam_policy_pb2.GetIamPolicyRequest(
+ resource="resource_value",
+ )
+
+ # Make the request
+ response = client.get_iam_policy(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]):
+ The request object. Request message for ``GetIamPolicy`` method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.iam.v1.policy_pb2.Policy:
+ An Identity and Access Management (IAM) policy, which specifies access
+ controls for Google Cloud resources.
+
+ A Policy is a collection of bindings. A binding binds
+ one or more members, or principals, to a single role.
+ Principals can be user accounts, service accounts,
+ Google groups, and domains (such as G Suite). A role
+ is a named list of permissions; each role can be an
+ IAM predefined role or a user-created custom role.
+
+ For some types of Google Cloud resources, a binding
+ can also specify a condition, which is a logical
+ expression that allows access to a resource only if
+ the expression evaluates to true. A condition can add
+ constraints based on attributes of the request, the
+ resource, or both. To learn which resources support
+ conditions in their IAM policies, see the [IAM
+ documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies).
+
+ **JSON example:**
+
+ ::
+
+     {
+       "bindings": [
+         {
+           "role": "roles/resourcemanager.organizationAdmin",
+           "members": [
+             "user:mike@example.com",
+             "group:admins@example.com",
+             "domain:google.com",
+             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+           ]
+         },
+         {
+           "role": "roles/resourcemanager.organizationViewer",
+           "members": ["user:eve@example.com"],
+           "condition": {
+             "title": "expirable access",
+             "description": "Does not grant access after Sep 2020",
+             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+           }
+         }
+       ],
+       "etag": "BwWWja0YfJA=",
+       "version": 3
+     }
+
+ **YAML example:**
+
+ ::
+
+     bindings:
+     - members:
+       - user:mike@example.com
+       - group:admins@example.com
+       - domain:google.com
+       - serviceAccount:my-project-id@appspot.gserviceaccount.com
+       role: roles/resourcemanager.organizationAdmin
+     - members:
+       - user:eve@example.com
+       role: roles/resourcemanager.organizationViewer
+       condition:
+         title: expirable access
+         description: Does not grant access after Sep 2020
+         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+     etag: BwWWja0YfJA=
+     version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](\ https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ if isinstance(request, dict):
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+ elif not request:
+ # Null request, just make one.
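+ # Note: an empty request still flows through the routing-header
+ # logic below with an empty ``resource`` field, so callers normally
+ # set it explicitly, e.g. (the value is an assumed placeholder):
+ #   iam_policy_pb2.GetIamPolicyRequest(resource="projects/my-project")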
+ request = iam_policy_pb2.GetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on the Identity-Aware + Proxy protected resource. More information about managing access + via IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
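+ # Illustrative sketch (permission names are assumed placeholders):
+ #   granted = set(response.permissions)
+ #   missing = set(request.permissions) - granted
+ # The response echoes back only the subset of the requested
+ # permissions that the caller actually holds.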
+ return response + + def get_iap_settings( + self, + request: Optional[Union[service.GetIapSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IapSettings: + r"""Gets the IAP settings on a particular IAP protected + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_get_iap_settings(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + request = iap_v1.GetIapSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_iap_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.GetIapSettingsRequest, dict]): + The request object. The request sent to GetIapSettings. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IapSettings: + The IAP configurable settings. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.GetIapSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetIapSettingsRequest): + request = service.GetIapSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iap_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_iap_settings( + self, + request: Optional[Union[service.UpdateIapSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IapSettings: + r"""Updates the IAP settings on a particular IAP protected resource. + It replaces all fields unless the ``update_mask`` is set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_update_iap_settings(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + iap_settings = iap_v1.IapSettings() + iap_settings.name = "name_value" + + request = iap_v1.UpdateIapSettingsRequest( + iap_settings=iap_settings, + ) + + # Make the request + response = client.update_iap_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.UpdateIapSettingsRequest, dict]): + The request object. The request sent to + UpdateIapSettings. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IapSettings: + The IAP configurable settings. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateIapSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateIapSettingsRequest): + request = service.UpdateIapSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_iap_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("iap_settings.name", request.iap_settings.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tunnel_dest_groups( + self, + request: Optional[Union[service.ListTunnelDestGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTunnelDestGroupsPager: + r"""Lists the existing TunnelDestGroups. To group across all + locations, use a ``-`` as the location ID. For example: + ``/v1/projects/123/iap_tunnel/locations/-/destGroups`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_list_tunnel_dest_groups(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + request = iap_v1.ListTunnelDestGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tunnel_dest_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.ListTunnelDestGroupsRequest, dict]): + The request object. The request to ListTunnelDestGroups. + parent (str): + Required. Google Cloud Project ID and location. In the + following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}``. + A ``-`` can be used for the location to group across all + locations. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.services.identity_aware_proxy_admin_service.pagers.ListTunnelDestGroupsPager: + The response from + ListTunnelDestGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListTunnelDestGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListTunnelDestGroupsRequest): + request = service.ListTunnelDestGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tunnel_dest_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTunnelDestGroupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
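+ # Illustrative note: iteration over the pager is lazy, e.g.
+ #   for group in client.list_tunnel_dest_groups(parent=parent):
+ #       print(group.name)
+ # issues one ListTunnelDestGroups RPC per page as the loop advances
+ # (``parent`` is an assumed, caller-supplied resource path).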
+ return response + + def create_tunnel_dest_group( + self, + request: Optional[Union[service.CreateTunnelDestGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + tunnel_dest_group: Optional[service.TunnelDestGroup] = None, + tunnel_dest_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.TunnelDestGroup: + r"""Creates a new TunnelDestGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_create_tunnel_dest_group(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + tunnel_dest_group = iap_v1.TunnelDestGroup() + tunnel_dest_group.name = "name_value" + + request = iap_v1.CreateTunnelDestGroupRequest( + parent="parent_value", + tunnel_dest_group=tunnel_dest_group, + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + + # Make the request + response = client.create_tunnel_dest_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.CreateTunnelDestGroupRequest, dict]): + The request object. The request to CreateTunnelDestGroup. + parent (str): + Required. Google Cloud Project ID and location. In the + following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tunnel_dest_group (google.cloud.iap_v1.types.TunnelDestGroup): + Required. The TunnelDestGroup to + create. + + This corresponds to the ``tunnel_dest_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tunnel_dest_group_id (str): + Required. The ID to use for the TunnelDestGroup, which + becomes the final component of the resource name. + + This value must be 4-63 characters, and valid characters + are ``[a-z]-``. + + This corresponds to the ``tunnel_dest_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.TunnelDestGroup: + A TunnelDestGroup. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tunnel_dest_group, tunnel_dest_group_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateTunnelDestGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
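+ # For reference, the equivalent flattened call is sketched below
+ # (all values are illustrative placeholders):
+ #   client.create_tunnel_dest_group(
+ #       parent="projects/123/iap_tunnel/locations/us-central1",
+ #       tunnel_dest_group=iap_v1.TunnelDestGroup(name="my-group"),
+ #       tunnel_dest_group_id="my-group",
+ #   )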
+ if not isinstance(request, service.CreateTunnelDestGroupRequest): + request = service.CreateTunnelDestGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tunnel_dest_group is not None: + request.tunnel_dest_group = tunnel_dest_group + if tunnel_dest_group_id is not None: + request.tunnel_dest_group_id = tunnel_dest_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tunnel_dest_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_tunnel_dest_group( + self, + request: Optional[Union[service.GetTunnelDestGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.TunnelDestGroup: + r"""Retrieves an existing TunnelDestGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_get_tunnel_dest_group(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + request = iap_v1.GetTunnelDestGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_tunnel_dest_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.GetTunnelDestGroupRequest, dict]): + The request object. The request to GetTunnelDestGroup. + name (str): + Required. Name of the TunnelDestGroup to be fetched. In + the following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.TunnelDestGroup: + A TunnelDestGroup. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetTunnelDestGroupRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetTunnelDestGroupRequest): + request = service.GetTunnelDestGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_tunnel_dest_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tunnel_dest_group( + self, + request: Optional[Union[service.DeleteTunnelDestGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a TunnelDestGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_delete_tunnel_dest_group(): + # Create a client + client = iap_v1.IdentityAwareProxyAdminServiceClient() + + # Initialize request argument(s) + request = iap_v1.DeleteTunnelDestGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_tunnel_dest_group(request=request) + + Args: + request (Union[google.cloud.iap_v1.types.DeleteTunnelDestGroupRequest, dict]): + The request object. The request to DeleteTunnelDestGroup. + name (str): + Required. Name of the TunnelDestGroup to delete. In the + following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteTunnelDestGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
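+ # Hedged usage sketch (the resource name is an assumed placeholder):
+ #   client.delete_tunnel_dest_group(
+ #       name="projects/123/iap_tunnel/locations/us-central1/destGroups/my-group"
+ #   )
+ # The RPC returns google.protobuf.Empty on the wire, which this
+ # method surfaces as ``None``.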
+ if not isinstance(request, service.DeleteTunnelDestGroupRequest):
+ request = service.DeleteTunnelDestGroupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_tunnel_dest_group]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ def update_tunnel_dest_group(
+ self,
+ request: Optional[Union[service.UpdateTunnelDestGroupRequest, dict]] = None,
+ *,
+ tunnel_dest_group: Optional[service.TunnelDestGroup] = None,
+ update_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> service.TunnelDestGroup:
+ r"""Updates a TunnelDestGroup.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import iap_v1
+
+ def sample_update_tunnel_dest_group():
+ # Create a client
+ client = iap_v1.IdentityAwareProxyAdminServiceClient()
+
+ # Initialize request argument(s)
+ tunnel_dest_group = iap_v1.TunnelDestGroup()
+ tunnel_dest_group.name = "name_value"
+
+ request = iap_v1.UpdateTunnelDestGroupRequest(
+ tunnel_dest_group=tunnel_dest_group,
+ )
+
+ # Make the request
+ response = client.update_tunnel_dest_group(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.iap_v1.types.UpdateTunnelDestGroupRequest, dict]):
+ The request object. The request to UpdateTunnelDestGroup.
+ tunnel_dest_group (google.cloud.iap_v1.types.TunnelDestGroup):
+ Required. The new values for the
+ TunnelDestGroup.
+
+ This corresponds to the ``tunnel_dest_group`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ A field mask that specifies which
+ TunnelDestGroup fields to update. If
+ omitted, then all of the fields are
+ updated. See
+ https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.iap_v1.types.TunnelDestGroup:
+ A TunnelDestGroup.
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
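+ # The flattened form guarded by this check looks like the following
+ # hedged sketch (field name and values are assumed placeholders):
+ #   from google.protobuf import field_mask_pb2
+ #   client.update_tunnel_dest_group(
+ #       tunnel_dest_group=group,
+ #       update_mask=field_mask_pb2.FieldMask(paths=["cidrs"]),
+ #   )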
+ has_flattened_params = any([tunnel_dest_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateTunnelDestGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateTunnelDestGroupRequest): + request = service.UpdateTunnelDestGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tunnel_dest_group is not None: + request.tunnel_dest_group = tunnel_dest_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tunnel_dest_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tunnel_dest_group.name", request.tunnel_dest_group.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "IdentityAwareProxyAdminServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IdentityAwareProxyAdminServiceClient",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/pagers.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/pagers.py new file mode 100644 index 000000000000..55d09d6d7ce0 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.iap_v1.types import service + + +class ListTunnelDestGroupsPager: + """A pager for iterating through ``list_tunnel_dest_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iap_v1.types.ListTunnelDestGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tunnel_dest_groups`` field. 
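+
+ For example (the resource path shown is an illustrative placeholder)::
+
+     pager = client.list_tunnel_dest_groups(parent="projects/123/iap_tunnel/locations/-")
+     for tunnel_dest_group in pager:
+         print(tunnel_dest_group.name)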
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListTunnelDestGroups`` requests and continue to iterate + through the ``tunnel_dest_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iap_v1.types.ListTunnelDestGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListTunnelDestGroupsResponse], + request: service.ListTunnelDestGroupsRequest, + response: service.ListTunnelDestGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iap_v1.types.ListTunnelDestGroupsRequest): + The initial request object. + response (google.cloud.iap_v1.types.ListTunnelDestGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListTunnelDestGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListTunnelDestGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[service.TunnelDestGroup]: + for page in self.pages: + yield from page.tunnel_dest_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTunnelDestGroupsAsyncPager: + """A pager for iterating through ``list_tunnel_dest_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iap_v1.types.ListTunnelDestGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tunnel_dest_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTunnelDestGroups`` requests and continue to iterate + through the ``tunnel_dest_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iap_v1.types.ListTunnelDestGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListTunnelDestGroupsResponse]], + request: service.ListTunnelDestGroupsRequest, + response: service.ListTunnelDestGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iap_v1.types.ListTunnelDestGroupsRequest): + The initial request object. + response (google.cloud.iap_v1.types.ListTunnelDestGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListTunnelDestGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListTunnelDestGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[service.TunnelDestGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.tunnel_dest_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/__init__.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/__init__.py new file mode 100644 index 000000000000..5a269ec69ff8 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import IdentityAwareProxyAdminServiceTransport +from .grpc import IdentityAwareProxyAdminServiceGrpcTransport +from .grpc_asyncio import IdentityAwareProxyAdminServiceGrpcAsyncIOTransport +from .rest import ( + IdentityAwareProxyAdminServiceRestInterceptor, + IdentityAwareProxyAdminServiceRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[IdentityAwareProxyAdminServiceTransport]] +_transport_registry["grpc"] = IdentityAwareProxyAdminServiceGrpcTransport +_transport_registry["grpc_asyncio"] = IdentityAwareProxyAdminServiceGrpcAsyncIOTransport +_transport_registry["rest"] = IdentityAwareProxyAdminServiceRestTransport + +__all__ = ( + "IdentityAwareProxyAdminServiceTransport", + "IdentityAwareProxyAdminServiceGrpcTransport", + "IdentityAwareProxyAdminServiceGrpcAsyncIOTransport", + "IdentityAwareProxyAdminServiceRestTransport", + "IdentityAwareProxyAdminServiceRestInterceptor", +) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/base.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/base.py new file mode 100644 index 000000000000..9e5553c3327c --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/base.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.iap_v1 import gapic_version as package_version +from google.cloud.iap_v1.types import service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class IdentityAwareProxyAdminServiceTransport(abc.ABC): + """Abstract transport class for IdentityAwareProxyAdminService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "iap.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+ be used for service account credentials.
+ """
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ # Don't apply the audience if a credentials file was passed by the user.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(
+ api_audience if api_audience else host
+ )
+
+ # If the credentials are service account credentials, then always try to use a self-signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
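+ # Each entry below pairs an RPC with its default retry/timeout
+ # policy; gapic_v1.method.wrap_method returns a callable that
+ # applies those defaults on every invocation, so a hedged usage
+ # sketch is:
+ #   self._wrapped_methods[self.get_iap_settings](request, timeout=5.0)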
+ self._wrapped_methods = { + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.get_iap_settings: gapic_v1.method.wrap_method( + self.get_iap_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_iap_settings: gapic_v1.method.wrap_method( + self.update_iap_settings, + default_timeout=None, + client_info=client_info, + ), + self.list_tunnel_dest_groups: gapic_v1.method.wrap_method( + self.list_tunnel_dest_groups, + default_timeout=None, + client_info=client_info, + ), + self.create_tunnel_dest_group: gapic_v1.method.wrap_method( + self.create_tunnel_dest_group, + default_timeout=None, + client_info=client_info, + ), + self.get_tunnel_dest_group: gapic_v1.method.wrap_method( + self.get_tunnel_dest_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_tunnel_dest_group: gapic_v1.method.wrap_method( + self.delete_tunnel_dest_group, + default_timeout=None, + client_info=client_info, + ), + self.update_tunnel_dest_group: gapic_v1.method.wrap_method( + self.update_tunnel_dest_group, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_iap_settings( + self, + ) -> Callable[ + [service.GetIapSettingsRequest], + Union[service.IapSettings, Awaitable[service.IapSettings]], + ]: + raise NotImplementedError() + + @property + def update_iap_settings( + self, + ) -> Callable[ + [service.UpdateIapSettingsRequest], + Union[service.IapSettings, Awaitable[service.IapSettings]], + ]: + raise NotImplementedError() + + @property + def list_tunnel_dest_groups( + self, + ) -> Callable[ + [service.ListTunnelDestGroupsRequest], + Union[ + service.ListTunnelDestGroupsResponse, + Awaitable[service.ListTunnelDestGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_tunnel_dest_group( + self, + ) -> Callable[ + [service.CreateTunnelDestGroupRequest], + Union[service.TunnelDestGroup, Awaitable[service.TunnelDestGroup]], + ]: + raise NotImplementedError() + + @property + def get_tunnel_dest_group( + self, + ) -> Callable[ + [service.GetTunnelDestGroupRequest], + Union[service.TunnelDestGroup, Awaitable[service.TunnelDestGroup]], + ]: + raise NotImplementedError() + + @property + def delete_tunnel_dest_group( + self, + ) -> Callable[ + [service.DeleteTunnelDestGroupRequest], + 
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_tunnel_dest_group( + self, + ) -> Callable[ + [service.UpdateTunnelDestGroupRequest], + Union[service.TunnelDestGroup, Awaitable[service.TunnelDestGroup]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("IdentityAwareProxyAdminServiceTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc.py new file mode 100644 index 000000000000..742bcbf6b2dd --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc.py @@ -0,0 +1,522 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.iap_v1.types import service + +from .base import DEFAULT_CLIENT_INFO, IdentityAwareProxyAdminServiceTransport + + +class IdentityAwareProxyAdminServiceGrpcTransport( + IdentityAwareProxyAdminServiceTransport +): + """gRPC backend transport for IdentityAwareProxyAdminService. + + APIs for Identity-Aware Proxy Admin configurations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "iap.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
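+ # Illustrative note: ``client_cert_source`` is expected to return a
+ # PEM-encoded ``(cert_bytes, key_bytes)`` pair, which the branch
+ # below feeds to grpc.ssl_channel_credentials() to build mutual-TLS
+ # channel credentials.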
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "iap.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service."""
+ return self._grpc_channel
+
+ @property
+ def set_iam_policy(
+ self,
+ ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+ r"""Return a callable for the set iam policy method over gRPC.
+
+ Sets the access control policy for an Identity-Aware Proxy
+ protected resource. Replaces any existing policy.
More + information about managing access via IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for an Identity-Aware Proxy + protected resource. More information about managing access via + IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on the Identity-Aware + Proxy protected resource. More information about managing access + via IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def get_iap_settings( + self, + ) -> Callable[[service.GetIapSettingsRequest], service.IapSettings]: + r"""Return a callable for the get iap settings method over gRPC. + + Gets the IAP settings on a particular IAP protected + resource. 
+ + Returns: + Callable[[~.GetIapSettingsRequest], + ~.IapSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iap_settings" not in self._stubs: + self._stubs["get_iap_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIapSettings", + request_serializer=service.GetIapSettingsRequest.serialize, + response_deserializer=service.IapSettings.deserialize, + ) + return self._stubs["get_iap_settings"] + + @property + def update_iap_settings( + self, + ) -> Callable[[service.UpdateIapSettingsRequest], service.IapSettings]: + r"""Return a callable for the update iap settings method over gRPC. + + Updates the IAP settings on a particular IAP protected resource. + It replaces all fields unless the ``update_mask`` is set. + + Returns: + Callable[[~.UpdateIapSettingsRequest], + ~.IapSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_iap_settings" not in self._stubs: + self._stubs["update_iap_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateIapSettings", + request_serializer=service.UpdateIapSettingsRequest.serialize, + response_deserializer=service.IapSettings.deserialize, + ) + return self._stubs["update_iap_settings"] + + @property + def list_tunnel_dest_groups( + self, + ) -> Callable[ + [service.ListTunnelDestGroupsRequest], service.ListTunnelDestGroupsResponse + ]: + r"""Return a callable for the list tunnel dest groups method over gRPC. + + Lists the existing TunnelDestGroups. To group across all + locations, use a ``-`` as the location ID. For example: + ``/v1/projects/123/iap_tunnel/locations/-/destGroups`` + + Returns: + Callable[[~.ListTunnelDestGroupsRequest], + ~.ListTunnelDestGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tunnel_dest_groups" not in self._stubs: + self._stubs["list_tunnel_dest_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/ListTunnelDestGroups", + request_serializer=service.ListTunnelDestGroupsRequest.serialize, + response_deserializer=service.ListTunnelDestGroupsResponse.deserialize, + ) + return self._stubs["list_tunnel_dest_groups"] + + @property + def create_tunnel_dest_group( + self, + ) -> Callable[[service.CreateTunnelDestGroupRequest], service.TunnelDestGroup]: + r"""Return a callable for the create tunnel dest group method over gRPC. + + Creates a new TunnelDestGroup. + + Returns: + Callable[[~.CreateTunnelDestGroupRequest], + ~.TunnelDestGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
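+ # Editorial sketch, not generated code: the property returns the bare
+ # unary-unary callable, so with a transport in hand one could invoke the
+ # RPC directly (all values hypothetical):
+ #
+ #     group = transport.create_tunnel_dest_group(
+ #         service.CreateTunnelDestGroupRequest(
+ #             parent="projects/123/iap_tunnel/locations/us-central1",
+ #             tunnel_dest_group_id="my-group",
+ #             tunnel_dest_group=service.TunnelDestGroup(name="my-group"),
+ #         )
+ #     )
+ #
+ # The generated client normally adds retry and metadata handling on top
+ # via ``_prep_wrapped_messages``.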
+ if "create_tunnel_dest_group" not in self._stubs: + self._stubs["create_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/CreateTunnelDestGroup", + request_serializer=service.CreateTunnelDestGroupRequest.serialize, + response_deserializer=service.TunnelDestGroup.deserialize, + ) + return self._stubs["create_tunnel_dest_group"] + + @property + def get_tunnel_dest_group( + self, + ) -> Callable[[service.GetTunnelDestGroupRequest], service.TunnelDestGroup]: + r"""Return a callable for the get tunnel dest group method over gRPC. + + Retrieves an existing TunnelDestGroup. + + Returns: + Callable[[~.GetTunnelDestGroupRequest], + ~.TunnelDestGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_tunnel_dest_group" not in self._stubs: + self._stubs["get_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetTunnelDestGroup", + request_serializer=service.GetTunnelDestGroupRequest.serialize, + response_deserializer=service.TunnelDestGroup.deserialize, + ) + return self._stubs["get_tunnel_dest_group"] + + @property + def delete_tunnel_dest_group( + self, + ) -> Callable[[service.DeleteTunnelDestGroupRequest], empty_pb2.Empty]: + r"""Return a callable for the delete tunnel dest group method over gRPC. + + Deletes a TunnelDestGroup. + + Returns: + Callable[[~.DeleteTunnelDestGroupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_tunnel_dest_group" not in self._stubs: + self._stubs["delete_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/DeleteTunnelDestGroup", + request_serializer=service.DeleteTunnelDestGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_tunnel_dest_group"] + + @property + def update_tunnel_dest_group( + self, + ) -> Callable[[service.UpdateTunnelDestGroupRequest], service.TunnelDestGroup]: + r"""Return a callable for the update tunnel dest group method over gRPC. + + Updates a TunnelDestGroup. + + Returns: + Callable[[~.UpdateTunnelDestGroupRequest], + ~.TunnelDestGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_tunnel_dest_group" not in self._stubs: + self._stubs["update_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateTunnelDestGroup", + request_serializer=service.UpdateTunnelDestGroupRequest.serialize, + response_deserializer=service.TunnelDestGroup.deserialize, + ) + return self._stubs["update_tunnel_dest_group"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("IdentityAwareProxyAdminServiceGrpcTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d55615fdaf7b --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py @@ -0,0 +1,528 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.iap_v1.types import service + +from .base import DEFAULT_CLIENT_INFO, IdentityAwareProxyAdminServiceTransport +from .grpc import IdentityAwareProxyAdminServiceGrpcTransport + + +class IdentityAwareProxyAdminServiceGrpcAsyncIOTransport( + IdentityAwareProxyAdminServiceTransport +): + """gRPC AsyncIO backend transport for IdentityAwareProxyAdminService. + + APIs for Identity-Aware Proxy Admin configurations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "iap.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "iap.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[aio.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def set_iam_policy(
+ self,
+ ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]:
+ r"""Return a callable for the set iam policy method over gRPC.
+
+ Sets the access control policy for an Identity-Aware Proxy
+ protected resource. Replaces any existing policy.
More + information about managing access via IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for an Identity-Aware Proxy + protected resource. More information about managing access via + IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on the Identity-Aware + Proxy protected resource. More information about managing access + via IAP can be found at: + https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def get_iap_settings( + self, + ) -> Callable[[service.GetIapSettingsRequest], Awaitable[service.IapSettings]]: + r"""Return a callable for the get iap settings method over gRPC. + + Gets the IAP settings on a particular IAP protected + resource. 
+ + Returns: + Callable[[~.GetIapSettingsRequest], + Awaitable[~.IapSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iap_settings" not in self._stubs: + self._stubs["get_iap_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIapSettings", + request_serializer=service.GetIapSettingsRequest.serialize, + response_deserializer=service.IapSettings.deserialize, + ) + return self._stubs["get_iap_settings"] + + @property + def update_iap_settings( + self, + ) -> Callable[[service.UpdateIapSettingsRequest], Awaitable[service.IapSettings]]: + r"""Return a callable for the update iap settings method over gRPC. + + Updates the IAP settings on a particular IAP protected resource. + It replaces all fields unless the ``update_mask`` is set. + + Returns: + Callable[[~.UpdateIapSettingsRequest], + Awaitable[~.IapSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_iap_settings" not in self._stubs: + self._stubs["update_iap_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateIapSettings", + request_serializer=service.UpdateIapSettingsRequest.serialize, + response_deserializer=service.IapSettings.deserialize, + ) + return self._stubs["update_iap_settings"] + + @property + def list_tunnel_dest_groups( + self, + ) -> Callable[ + [service.ListTunnelDestGroupsRequest], + Awaitable[service.ListTunnelDestGroupsResponse], + ]: + r"""Return a callable for the list tunnel dest groups method over gRPC. + + Lists the existing TunnelDestGroups. To group across all + locations, use a ``-`` as the location ID. For example: + ``/v1/projects/123/iap_tunnel/locations/-/destGroups`` + + Returns: + Callable[[~.ListTunnelDestGroupsRequest], + Awaitable[~.ListTunnelDestGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tunnel_dest_groups" not in self._stubs: + self._stubs["list_tunnel_dest_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/ListTunnelDestGroups", + request_serializer=service.ListTunnelDestGroupsRequest.serialize, + response_deserializer=service.ListTunnelDestGroupsResponse.deserialize, + ) + return self._stubs["list_tunnel_dest_groups"] + + @property + def create_tunnel_dest_group( + self, + ) -> Callable[ + [service.CreateTunnelDestGroupRequest], Awaitable[service.TunnelDestGroup] + ]: + r"""Return a callable for the create tunnel dest group method over gRPC. + + Creates a new TunnelDestGroup. + + Returns: + Callable[[~.CreateTunnelDestGroupRequest], + Awaitable[~.TunnelDestGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_tunnel_dest_group" not in self._stubs: + self._stubs["create_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/CreateTunnelDestGroup", + request_serializer=service.CreateTunnelDestGroupRequest.serialize, + response_deserializer=service.TunnelDestGroup.deserialize, + ) + return self._stubs["create_tunnel_dest_group"] + + @property + def get_tunnel_dest_group( + self, + ) -> Callable[ + [service.GetTunnelDestGroupRequest], Awaitable[service.TunnelDestGroup] + ]: + r"""Return a callable for the get tunnel dest group method over gRPC. + + Retrieves an existing TunnelDestGroup. + + Returns: + Callable[[~.GetTunnelDestGroupRequest], + Awaitable[~.TunnelDestGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_tunnel_dest_group" not in self._stubs: + self._stubs["get_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetTunnelDestGroup", + request_serializer=service.GetTunnelDestGroupRequest.serialize, + response_deserializer=service.TunnelDestGroup.deserialize, + ) + return self._stubs["get_tunnel_dest_group"] + + @property + def delete_tunnel_dest_group( + self, + ) -> Callable[[service.DeleteTunnelDestGroupRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tunnel dest group method over gRPC. + + Deletes a TunnelDestGroup. + + Returns: + Callable[[~.DeleteTunnelDestGroupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_tunnel_dest_group" not in self._stubs: + self._stubs["delete_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/DeleteTunnelDestGroup", + request_serializer=service.DeleteTunnelDestGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_tunnel_dest_group"] + + @property + def update_tunnel_dest_group( + self, + ) -> Callable[ + [service.UpdateTunnelDestGroupRequest], Awaitable[service.TunnelDestGroup] + ]: + r"""Return a callable for the update tunnel dest group method over gRPC. + + Updates a TunnelDestGroup. + + Returns: + Callable[[~.UpdateTunnelDestGroupRequest], + Awaitable[~.TunnelDestGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
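+ # Editorial sketch, not generated code: on this AsyncIO transport the
+ # returned callable yields an awaitable, so a caller would write e.g.
+ # (request construction omitted, hypothetical):
+ #
+ #     async def apply_update(request):
+ #         return await transport.update_tunnel_dest_group(request)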
+ if "update_tunnel_dest_group" not in self._stubs: + self._stubs["update_tunnel_dest_group"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateTunnelDestGroup", + request_serializer=service.UpdateTunnelDestGroupRequest.serialize, + response_deserializer=service.TunnelDestGroup.deserialize, + ) + return self._stubs["update_tunnel_dest_group"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("IdentityAwareProxyAdminServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/rest.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/rest.py new file mode 100644 index 000000000000..2eead816f2b0 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/rest.py @@ -0,0 +1,1634 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.iap_v1.types import service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import IdentityAwareProxyAdminServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class IdentityAwareProxyAdminServiceRestInterceptor: + """Interceptor for IdentityAwareProxyAdminService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the IdentityAwareProxyAdminServiceRestTransport. + + .. 
code-block:: python
+ class MyCustomIdentityAwareProxyAdminServiceInterceptor(IdentityAwareProxyAdminServiceRestInterceptor):
+ def pre_create_tunnel_dest_group(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_create_tunnel_dest_group(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_delete_tunnel_dest_group(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def pre_get_iam_policy(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_get_iam_policy(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_get_iap_settings(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_get_iap_settings(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_get_tunnel_dest_group(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_get_tunnel_dest_group(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_list_tunnel_dest_groups(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_list_tunnel_dest_groups(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_set_iam_policy(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_set_iam_policy(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_test_iam_permissions(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_test_iam_permissions(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_update_iap_settings(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_update_iap_settings(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ def pre_update_tunnel_dest_group(self, request, metadata):
+ logging.log(logging.INFO, f"Received request: {request}")
+ return request, metadata
+
+ def post_update_tunnel_dest_group(self, response):
+ logging.log(logging.INFO, f"Received response: {response}")
+ return response
+
+ transport = IdentityAwareProxyAdminServiceRestTransport(interceptor=MyCustomIdentityAwareProxyAdminServiceInterceptor())
+ client = IdentityAwareProxyAdminServiceClient(transport=transport)
+
+
+ """
+
+ def pre_create_tunnel_dest_group(
+ self,
+ request: service.CreateTunnelDestGroupRequest,
+ metadata: Sequence[Tuple[str, str]],
+ ) -> Tuple[service.CreateTunnelDestGroupRequest, Sequence[Tuple[str, str]]]:
+ """Pre-rpc interceptor for create_tunnel_dest_group
+
+ Override in a subclass to manipulate the request or metadata
+ before they are sent to the IdentityAwareProxyAdminService server.
+ """
+ return request, metadata
+
+ def post_create_tunnel_dest_group(
+ self, response: service.TunnelDestGroup
+ ) -> service.TunnelDestGroup:
+ """Post-rpc interceptor for create_tunnel_dest_group
+
+ Override in a subclass to manipulate the response
+ after it is returned by the IdentityAwareProxyAdminService server but before
+ it is returned to user code.
+ """ + return response + + def pre_delete_tunnel_dest_group( + self, + request: service.DeleteTunnelDestGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.DeleteTunnelDestGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_tunnel_dest_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. + """ + return response + + def pre_get_iap_settings( + self, + request: service.GetIapSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetIapSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iap_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_get_iap_settings( + self, response: service.IapSettings + ) -> service.IapSettings: + """Post-rpc interceptor for get_iap_settings + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. + """ + return response + + def pre_get_tunnel_dest_group( + self, + request: service.GetTunnelDestGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetTunnelDestGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_tunnel_dest_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_get_tunnel_dest_group( + self, response: service.TunnelDestGroup + ) -> service.TunnelDestGroup: + """Post-rpc interceptor for get_tunnel_dest_group + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. + """ + return response + + def pre_list_tunnel_dest_groups( + self, + request: service.ListTunnelDestGroupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListTunnelDestGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tunnel_dest_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_list_tunnel_dest_groups( + self, response: service.ListTunnelDestGroupsResponse + ) -> service.ListTunnelDestGroupsResponse: + """Post-rpc interceptor for list_tunnel_dest_groups + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. + """ + return response + + def pre_update_iap_settings( + self, + request: service.UpdateIapSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateIapSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_iap_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_update_iap_settings( + self, response: service.IapSettings + ) -> service.IapSettings: + """Post-rpc interceptor for update_iap_settings + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. + """ + return response + + def pre_update_tunnel_dest_group( + self, + request: service.UpdateTunnelDestGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateTunnelDestGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_tunnel_dest_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyAdminService server. + """ + return request, metadata + + def post_update_tunnel_dest_group( + self, response: service.TunnelDestGroup + ) -> service.TunnelDestGroup: + """Post-rpc interceptor for update_tunnel_dest_group + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyAdminService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class IdentityAwareProxyAdminServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: IdentityAwareProxyAdminServiceRestInterceptor + + +class IdentityAwareProxyAdminServiceRestTransport( + IdentityAwareProxyAdminServiceTransport +): + """REST backend transport for IdentityAwareProxyAdminService. + + APIs for Identity-Aware Proxy Admin configurations. 
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1.
+
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "iap.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ interceptor: Optional[IdentityAwareProxyAdminServiceRestInterceptor] = None,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
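+ # Editorial sketch, not generated code: ``url_scheme`` matters mainly for
+ # non-production endpoints; e.g. a local test server might be reached with
+ # (values hypothetical):
+ #
+ #     transport = IdentityAwareProxyAdminServiceRestTransport(
+ #         host="localhost:8080",
+ #         url_scheme="http",
+ #         credentials=ga_credentials.AnonymousCredentials(),
+ #     )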
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = (
+ interceptor or IdentityAwareProxyAdminServiceRestInterceptor()
+ )
+ self._prep_wrapped_messages(client_info)
+
+ class _CreateTunnelDestGroup(IdentityAwareProxyAdminServiceRestStub):
+ def __hash__(self):
+ return hash("CreateTunnelDestGroup")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ "tunnelDestGroupId": "",
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: service.CreateTunnelDestGroupRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> service.TunnelDestGroup:
+ r"""Call the create tunnel dest group method over HTTP.
+
+ Args:
+ request (~.service.CreateTunnelDestGroupRequest):
+ The request object. The request to CreateTunnelDestGroup.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.service.TunnelDestGroup:
+ A TunnelDestGroup.
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1/{parent=projects/*/iap_tunnel/locations/*}/destGroups",
+ "body": "tunnel_dest_group",
+ },
+ ]
+ request, metadata = self._interceptor.pre_create_tunnel_dest_group(
+ request, metadata
+ )
+ pb_request = service.CreateTunnelDestGroupRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
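+ # Editorial note: ``from_http_response`` maps the HTTP status onto the
+ # matching ``google.api_core.exceptions`` class, for example:
+ #     403 -> core_exceptions.PermissionDenied
+ #     404 -> core_exceptions.NotFound
+ #     409 -> core_exceptions.Conflict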
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.TunnelDestGroup() + pb_resp = service.TunnelDestGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_tunnel_dest_group(resp) + return resp + + class _DeleteTunnelDestGroup(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("DeleteTunnelDestGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteTunnelDestGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete tunnel dest group method over HTTP. + + Args: + request (~.service.DeleteTunnelDestGroupRequest): + The request object. The request to DeleteTunnelDestGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/iap_tunnel/locations/*/destGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_tunnel_dest_group( + request, metadata + ) + pb_request = service.DeleteTunnelDestGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetIamPolicy(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=**}:getIamPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetIapSettings(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("GetIapSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetIapSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IapSettings: + r"""Call the get iap settings method over HTTP. + + Args: + request (~.service.GetIapSettingsRequest): + The request object. The request sent to GetIapSettings. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.IapSettings: + The IAP configurable settings. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=**}:iapSettings", + }, + ] + request, metadata = self._interceptor.pre_get_iap_settings( + request, metadata + ) + pb_request = service.GetIapSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.IapSettings() + pb_resp = service.IapSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iap_settings(resp) + return resp + + class _GetTunnelDestGroup(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("GetTunnelDestGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetTunnelDestGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.TunnelDestGroup: + r"""Call the get tunnel dest group method over HTTP. 
+ + Args: + request (~.service.GetTunnelDestGroupRequest): + The request object. The request to GetTunnelDestGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.TunnelDestGroup: + A TunnelDestGroup. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/iap_tunnel/locations/*/destGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_tunnel_dest_group( + request, metadata + ) + pb_request = service.GetTunnelDestGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.TunnelDestGroup() + pb_resp = service.TunnelDestGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_tunnel_dest_group(resp) + return resp + + class _ListTunnelDestGroups(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("ListTunnelDestGroups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListTunnelDestGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListTunnelDestGroupsResponse: + r"""Call the list tunnel dest groups method over HTTP. + + Args: + request (~.service.ListTunnelDestGroupsRequest): + The request object. The request to ListTunnelDestGroups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListTunnelDestGroupsResponse: + The response from + ListTunnelDestGroups. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/iap_tunnel/locations/*}/destGroups", + }, + ] + request, metadata = self._interceptor.pre_list_tunnel_dest_groups( + request, metadata + ) + pb_request = service.ListTunnelDestGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListTunnelDestGroupsResponse() + pb_resp = service.ListTunnelDestGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tunnel_dest_groups(resp) + return resp + + class _SetIamPolicy(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=**}:setIamPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. 
+ + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=**}:testIamPermissions", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _UpdateIapSettings(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("UpdateIapSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateIapSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IapSettings: + r"""Call the update iap settings method over HTTP. + + Args: + request (~.service.UpdateIapSettingsRequest): + The request object. The request sent to + UpdateIapSettings. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.IapSettings: + The IAP configurable settings. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{iap_settings.name=**}:iapSettings", + "body": "iap_settings", + }, + ] + request, metadata = self._interceptor.pre_update_iap_settings( + request, metadata + ) + pb_request = service.UpdateIapSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.IapSettings() + pb_resp = service.IapSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_iap_settings(resp) + return resp + + class _UpdateTunnelDestGroup(IdentityAwareProxyAdminServiceRestStub): + def __hash__(self): + return hash("UpdateTunnelDestGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateTunnelDestGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.TunnelDestGroup: + r"""Call the update tunnel dest group method over HTTP. + + Args: + request (~.service.UpdateTunnelDestGroupRequest): + The request object. The request to UpdateTunnelDestGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.TunnelDestGroup: + A TunnelDestGroup. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{tunnel_dest_group.name=projects/*/iap_tunnel/locations/*/destGroups/*}", + "body": "tunnel_dest_group", + }, + ] + request, metadata = self._interceptor.pre_update_tunnel_dest_group( + request, metadata + ) + pb_request = service.UpdateTunnelDestGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.TunnelDestGroup() + pb_resp = service.TunnelDestGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_tunnel_dest_group(resp) + return resp + + @property + def create_tunnel_dest_group( + self, + ) -> Callable[[service.CreateTunnelDestGroupRequest], service.TunnelDestGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTunnelDestGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_tunnel_dest_group( + self, + ) -> Callable[[service.DeleteTunnelDestGroupRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTunnelDestGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iap_settings( + self, + ) -> Callable[[service.GetIapSettingsRequest], service.IapSettings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIapSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_tunnel_dest_group( + self, + ) -> Callable[[service.GetTunnelDestGroupRequest], service.TunnelDestGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetTunnelDestGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tunnel_dest_groups( + self, + ) -> Callable[ + [service.ListTunnelDestGroupsRequest], service.ListTunnelDestGroupsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTunnelDestGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_iap_settings( + self, + ) -> Callable[[service.UpdateIapSettingsRequest], service.IapSettings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateIapSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_tunnel_dest_group( + self, + ) -> Callable[[service.UpdateTunnelDestGroupRequest], service.TunnelDestGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTunnelDestGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("IdentityAwareProxyAdminServiceRestTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/__init__.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/__init__.py new file mode 100644 index 000000000000..7cdc6b2b41eb --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
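The REST stubs above never talk to the service directly; every call is funneled through the transport's interceptor, whose ``pre_*`` hooks run before the HTTP request is built and whose ``post_*`` hooks run after the response is parsed. A minimal sketch of plugging in a custom interceptor, assuming the ``IdentityAwareProxyAdminServiceRestInterceptor`` base class defined earlier in this module and Application Default Credentials; the resource name is hypothetical:

.. code-block:: python

    from google.cloud import iap_v1
    from google.cloud.iap_v1.services.identity_aware_proxy_admin_service.transports.rest import (
        IdentityAwareProxyAdminServiceRestInterceptor,
        IdentityAwareProxyAdminServiceRestTransport,
    )

    class LoggingInterceptor(IdentityAwareProxyAdminServiceRestInterceptor):
        def post_get_iap_settings(self, response):
            # Runs after the _GetIapSettings stub parses the HTTP response.
            print(f"fetched IAP settings for {response.name}")
            return response

    transport = IdentityAwareProxyAdminServiceRestTransport(
        interceptor=LoggingInterceptor(),
    )
    client = iap_v1.IdentityAwareProxyAdminServiceClient(transport=transport)
    settings = client.get_iap_settings(request={"name": "projects/123/iap_web"})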
+# +from .async_client import IdentityAwareProxyOAuthServiceAsyncClient +from .client import IdentityAwareProxyOAuthServiceClient + +__all__ = ( + "IdentityAwareProxyOAuthServiceClient", + "IdentityAwareProxyOAuthServiceAsyncClient", +) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/async_client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/async_client.py new file mode 100644 index 000000000000..d6675fd15855 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/async_client.py @@ -0,0 +1,906 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.iap_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service import pagers +from google.cloud.iap_v1.types import service + +from .client import IdentityAwareProxyOAuthServiceClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + IdentityAwareProxyOAuthServiceTransport, +) +from .transports.grpc_asyncio import IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport + + +class IdentityAwareProxyOAuthServiceAsyncClient: + """API to programmatically create, list and retrieve Identity + Aware Proxy (IAP) OAuth brands; and create, retrieve, delete and + reset-secret of IAP OAuth clients. 
+ """ + + _client: IdentityAwareProxyOAuthServiceClient + + DEFAULT_ENDPOINT = IdentityAwareProxyOAuthServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = IdentityAwareProxyOAuthServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + IdentityAwareProxyOAuthServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IdentityAwareProxyOAuthServiceAsyncClient: The constructed client. + """ + return IdentityAwareProxyOAuthServiceClient.from_service_account_info.__func__(IdentityAwareProxyOAuthServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IdentityAwareProxyOAuthServiceAsyncClient: The constructed client. + """ + return IdentityAwareProxyOAuthServiceClient.from_service_account_file.__func__(IdentityAwareProxyOAuthServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return IdentityAwareProxyOAuthServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> IdentityAwareProxyOAuthServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            IdentityAwareProxyOAuthServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(IdentityAwareProxyOAuthServiceClient).get_transport_class,
+        type(IdentityAwareProxyOAuthServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, IdentityAwareProxyOAuthServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the identity aware proxy o auth service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.IdentityAwareProxyOAuthServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+ """ + self._client = IdentityAwareProxyOAuthServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_brands( + self, + request: Optional[Union[service.ListBrandsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListBrandsResponse: + r"""Lists the existing brands for the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_list_brands(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.ListBrandsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_brands(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.ListBrandsRequest, dict]]): + The request object. The request sent to ListBrands. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.ListBrandsResponse: + Response message for ListBrands. + """ + # Create or coerce a protobuf request object. + request = service.ListBrandsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_brands, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_brand( + self, + request: Optional[Union[service.CreateBrandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Brand: + r"""Constructs a new OAuth brand for the project if one + does not exist. The created brand is "internal only", + meaning that OAuth clients created under it only accept + requests from users who belong to the same Google + Workspace organization as the project. The brand is + created in an un-reviewed status. NOTE: The "internal + only" status can be manually changed in the Google Cloud + Console. Requires that a brand does not already exist + for the project, and that the specified support email is + owned by the caller. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_create_brand(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.CreateBrandRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_brand(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.CreateBrandRequest, dict]]): + The request object. The request sent to CreateBrand. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.Brand: + OAuth brand data. + NOTE: Only contains a portion of the + data that describes a brand. + + """ + # Create or coerce a protobuf request object. + request = service.CreateBrandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_brand, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_brand( + self, + request: Optional[Union[service.GetBrandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Brand: + r"""Retrieves the OAuth brand of the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_get_brand(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.GetBrandRequest( + name="name_value", + ) + + # Make the request + response = await client.get_brand(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.GetBrandRequest, dict]]): + The request object. The request sent to GetBrand. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.Brand: + OAuth brand data. 
+ NOTE: Only contains a portion of the + data that describes a brand. + + """ + # Create or coerce a protobuf request object. + request = service.GetBrandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_brand, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_identity_aware_proxy_client( + self, + request: Optional[ + Union[service.CreateIdentityAwareProxyClientRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IdentityAwareProxyClient: + r"""Creates an Identity Aware Proxy (IAP) OAuth client. + The client is owned by IAP. Requires that the brand for + the project exists and that it is set for internal-only + use. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_create_identity_aware_proxy_client(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.CreateIdentityAwareProxyClientRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_identity_aware_proxy_client(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.CreateIdentityAwareProxyClientRequest, dict]]): + The request object. The request sent to + CreateIdentityAwareProxyClient. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. + + """ + # Create or coerce a protobuf request object. + request = service.CreateIdentityAwareProxyClientRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_identity_aware_proxy_client, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    async def list_identity_aware_proxy_clients(
+        self,
+        request: Optional[
+            Union[service.ListIdentityAwareProxyClientsRequest, dict]
+        ] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListIdentityAwareProxyClientsAsyncPager:
+        r"""Lists the existing clients for the brand.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import iap_v1
+
+            async def sample_list_identity_aware_proxy_clients():
+                # Create a client
+                client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = iap_v1.ListIdentityAwareProxyClientsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_identity_aware_proxy_clients(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.iap_v1.types.ListIdentityAwareProxyClientsRequest, dict]]):
+                The request object. The request sent to
+                ListIdentityAwareProxyClients.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.pagers.ListIdentityAwareProxyClientsAsyncPager:
+                Response message for
+                ListIdentityAwareProxyClients.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = service.ListIdentityAwareProxyClientsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_identity_aware_proxy_clients,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListIdentityAwareProxyClientsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_identity_aware_proxy_client(
+        self,
+        request: Optional[
+            Union[service.GetIdentityAwareProxyClientRequest, dict]
+        ] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> service.IdentityAwareProxyClient:
+        r"""Retrieves an Identity Aware Proxy (IAP) OAuth client.
+        Requires that the client is owned by IAP.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import iap_v1
+
+            async def sample_get_identity_aware_proxy_client():
+                # Create a client
+                client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = iap_v1.GetIdentityAwareProxyClientRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_identity_aware_proxy_client(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.iap_v1.types.GetIdentityAwareProxyClientRequest, dict]]):
+                The request object. The request sent to
+                GetIdentityAwareProxyClient.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.iap_v1.types.IdentityAwareProxyClient:
+                Contains the data that describes an
+                Identity Aware Proxy owned client.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = service.GetIdentityAwareProxyClientRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_identity_aware_proxy_client,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def reset_identity_aware_proxy_client_secret(
+        self,
+        request: Optional[
+            Union[service.ResetIdentityAwareProxyClientSecretRequest, dict]
+        ] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> service.IdentityAwareProxyClient:
+        r"""Resets an Identity Aware Proxy (IAP) OAuth client
+        secret. Useful if the secret was compromised. Requires
+        that the client is owned by IAP.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_reset_identity_aware_proxy_client_secret(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.ResetIdentityAwareProxyClientSecretRequest( + name="name_value", + ) + + # Make the request + response = await client.reset_identity_aware_proxy_client_secret(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.ResetIdentityAwareProxyClientSecretRequest, dict]]): + The request object. The request sent to + ResetIdentityAwareProxyClientSecret. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. + + """ + # Create or coerce a protobuf request object. + request = service.ResetIdentityAwareProxyClientSecretRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reset_identity_aware_proxy_client_secret, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_identity_aware_proxy_client( + self, + request: Optional[ + Union[service.DeleteIdentityAwareProxyClientRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Identity Aware Proxy (IAP) OAuth client. + Useful for removing obsolete clients, managing the + number of clients in a given project, and cleaning up + after tests. Requires that the client is owned by IAP. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + async def sample_delete_identity_aware_proxy_client(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceAsyncClient() + + # Initialize request argument(s) + request = iap_v1.DeleteIdentityAwareProxyClientRequest( + name="name_value", + ) + + # Make the request + await client.delete_identity_aware_proxy_client(request=request) + + Args: + request (Optional[Union[google.cloud.iap_v1.types.DeleteIdentityAwareProxyClientRequest, dict]]): + The request object. The request sent to + DeleteIdentityAwareProxyClient. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = service.DeleteIdentityAwareProxyClientRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_identity_aware_proxy_client, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IdentityAwareProxyOAuthServiceAsyncClient",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py new file mode 100644 index 000000000000..d07e1fae7157 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py @@ -0,0 +1,1137 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
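+#
+# A minimal usage sketch for the synchronous client defined in this module.
+# Like the generated snippets below, it is a template only and may require
+# modification (it assumes Application Default Credentials and a hypothetical
+# project number):
+#
+#   from google.cloud import iap_v1
+#
+#   client = iap_v1.IdentityAwareProxyOAuthServiceClient()
+#   response = client.list_brands(request={"parent": "projects/123"})
+#   for brand in response.brands:
+#       print(brand.name)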
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.iap_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service import pagers
+from google.cloud.iap_v1.types import service
+
+from .transports.base import (
+    DEFAULT_CLIENT_INFO,
+    IdentityAwareProxyOAuthServiceTransport,
+)
+from .transports.grpc import IdentityAwareProxyOAuthServiceGrpcTransport
+from .transports.grpc_asyncio import IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport
+from .transports.rest import IdentityAwareProxyOAuthServiceRestTransport
+
+
+class IdentityAwareProxyOAuthServiceClientMeta(type):
+    """Metaclass for the IdentityAwareProxyOAuthService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[IdentityAwareProxyOAuthServiceTransport]]
+    _transport_registry["grpc"] = IdentityAwareProxyOAuthServiceGrpcTransport
+    _transport_registry[
+        "grpc_asyncio"
+    ] = IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = IdentityAwareProxyOAuthServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[IdentityAwareProxyOAuthServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class IdentityAwareProxyOAuthServiceClient(
+    metaclass=IdentityAwareProxyOAuthServiceClientMeta
+):
+    """API to programmatically create, list and retrieve Identity
+    Aware Proxy (IAP) OAuth brands; and create, retrieve, delete and
+    reset-secret of IAP OAuth clients.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "iap.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IdentityAwareProxyOAuthServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IdentityAwareProxyOAuthServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> IdentityAwareProxyOAuthServiceTransport: + """Returns the transport used by the client instance. + + Returns: + IdentityAwareProxyOAuthServiceTransport: The transport used by the client + instance. 
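+
+        For example (an illustrative sketch only), a second client can reuse
+        this transport instead of creating its own:
+
+        .. code-block:: python
+
+            client = IdentityAwareProxyOAuthServiceClient()
+            other_client = IdentityAwareProxyOAuthServiceClient(
+                transport=client.transport,
+            )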
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
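+
+        For example (an illustrative sketch only; it assumes the
+        ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` and
+        ``GOOGLE_API_USE_MTLS_ENDPOINT`` environment variables are unset, so
+        the defaults "false" and "auto" apply):
+
+        .. code-block:: python
+
+            endpoint, cert_source = (
+                IdentityAwareProxyOAuthServiceClient.get_mtls_endpoint_and_cert_source()
+            )
+            # With no client certificate in use, this resolves to the regular
+            # endpoint and no cert source.
+            assert endpoint == IdentityAwareProxyOAuthServiceClient.DEFAULT_ENDPOINT
+            assert cert_source is None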
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, IdentityAwareProxyOAuthServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the identity aware proxy o auth service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, IdentityAwareProxyOAuthServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, IdentityAwareProxyOAuthServiceTransport): + # transport is a IdentityAwareProxyOAuthServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_brands( + self, + request: Optional[Union[service.ListBrandsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListBrandsResponse: + r"""Lists the existing brands for the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_list_brands(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.ListBrandsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_brands(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.ListBrandsRequest, dict]): + The request object. The request sent to ListBrands. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.ListBrandsResponse: + Response message for ListBrands. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.ListBrandsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListBrandsRequest): + request = service.ListBrandsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_brands] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_brand( + self, + request: Optional[Union[service.CreateBrandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Brand: + r"""Constructs a new OAuth brand for the project if one + does not exist. The created brand is "internal only", + meaning that OAuth clients created under it only accept + requests from users who belong to the same Google + Workspace organization as the project. The brand is + created in an un-reviewed status. NOTE: The "internal + only" status can be manually changed in the Google Cloud + Console. Requires that a brand does not already exist + for the project, and that the specified support email is + owned by the caller. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_create_brand(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.CreateBrandRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_brand(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.CreateBrandRequest, dict]): + The request object. The request sent to CreateBrand. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.Brand: + OAuth brand data. + NOTE: Only contains a portion of the + data that describes a brand. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateBrandRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, service.CreateBrandRequest): + request = service.CreateBrandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_brand] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_brand( + self, + request: Optional[Union[service.GetBrandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Brand: + r"""Retrieves the OAuth brand of the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_get_brand(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.GetBrandRequest( + name="name_value", + ) + + # Make the request + response = client.get_brand(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.GetBrandRequest, dict]): + The request object. The request sent to GetBrand. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.Brand: + OAuth brand data. + NOTE: Only contains a portion of the + data that describes a brand. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.GetBrandRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetBrandRequest): + request = service.GetBrandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_brand] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_identity_aware_proxy_client( + self, + request: Optional[ + Union[service.CreateIdentityAwareProxyClientRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IdentityAwareProxyClient: + r"""Creates an Identity Aware Proxy (IAP) OAuth client. 
+ The client is owned by IAP. Requires that the brand for + the project exists and that it is set for internal-only + use. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_create_identity_aware_proxy_client(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.CreateIdentityAwareProxyClientRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_identity_aware_proxy_client(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.CreateIdentityAwareProxyClientRequest, dict]): + The request object. The request sent to + CreateIdentityAwareProxyClient. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateIdentityAwareProxyClientRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateIdentityAwareProxyClientRequest): + request = service.CreateIdentityAwareProxyClientRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_identity_aware_proxy_client + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_identity_aware_proxy_clients( + self, + request: Optional[ + Union[service.ListIdentityAwareProxyClientsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListIdentityAwareProxyClientsPager: + r"""Lists the existing clients for the brand. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_list_identity_aware_proxy_clients(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.ListIdentityAwareProxyClientsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_identity_aware_proxy_clients(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.ListIdentityAwareProxyClientsRequest, dict]): + The request object. The request sent to + ListIdentityAwareProxyClients. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.pagers.ListIdentityAwareProxyClientsPager: + Response message for + ListIdentityAwareProxyClients. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.ListIdentityAwareProxyClientsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListIdentityAwareProxyClientsRequest): + request = service.ListIdentityAwareProxyClientsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_identity_aware_proxy_clients + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListIdentityAwareProxyClientsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_identity_aware_proxy_client( + self, + request: Optional[ + Union[service.GetIdentityAwareProxyClientRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IdentityAwareProxyClient: + r"""Retrieves an Identity Aware Proxy (IAP) OAuth client. + Requires that the client is owned by IAP. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_get_identity_aware_proxy_client(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.GetIdentityAwareProxyClientRequest( + name="name_value", + ) + + # Make the request + response = client.get_identity_aware_proxy_client(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.GetIdentityAwareProxyClientRequest, dict]): + The request object. The request sent to + GetIdentityAwareProxyClient. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.GetIdentityAwareProxyClientRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetIdentityAwareProxyClientRequest): + request = service.GetIdentityAwareProxyClientRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_identity_aware_proxy_client + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reset_identity_aware_proxy_client_secret( + self, + request: Optional[ + Union[service.ResetIdentityAwareProxyClientSecretRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IdentityAwareProxyClient: + r"""Resets an Identity Aware Proxy (IAP) OAuth client + secret. Useful if the secret was compromised. Requires + that the client is owned by IAP. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_reset_identity_aware_proxy_client_secret(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.ResetIdentityAwareProxyClientSecretRequest( + name="name_value", + ) + + # Make the request + response = client.reset_identity_aware_proxy_client_secret(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iap_v1.types.ResetIdentityAwareProxyClientSecretRequest, dict]): + The request object. The request sent to + ResetIdentityAwareProxyClientSecret. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.iap_v1.types.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.ResetIdentityAwareProxyClientSecretRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ResetIdentityAwareProxyClientSecretRequest): + request = service.ResetIdentityAwareProxyClientSecretRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.reset_identity_aware_proxy_client_secret + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_identity_aware_proxy_client( + self, + request: Optional[ + Union[service.DeleteIdentityAwareProxyClientRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Identity Aware Proxy (IAP) OAuth client. + Useful for removing obsolete clients, managing the + number of clients in a given project, and cleaning up + after tests. Requires that the client is owned by IAP. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iap_v1 + + def sample_delete_identity_aware_proxy_client(): + # Create a client + client = iap_v1.IdentityAwareProxyOAuthServiceClient() + + # Initialize request argument(s) + request = iap_v1.DeleteIdentityAwareProxyClientRequest( + name="name_value", + ) + + # Make the request + client.delete_identity_aware_proxy_client(request=request) + + Args: + request (Union[google.cloud.iap_v1.types.DeleteIdentityAwareProxyClientRequest, dict]): + The request object. The request sent to + DeleteIdentityAwareProxyClient. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteIdentityAwareProxyClientRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteIdentityAwareProxyClientRequest): + request = service.DeleteIdentityAwareProxyClientRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_identity_aware_proxy_client + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "IdentityAwareProxyOAuthServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IdentityAwareProxyOAuthServiceClient",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/pagers.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/pagers.py new file mode 100644 index 000000000000..c059aa815717 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/pagers.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.iap_v1.types import service + + +class ListIdentityAwareProxyClientsPager: + """A pager for iterating through ``list_identity_aware_proxy_clients`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iap_v1.types.ListIdentityAwareProxyClientsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``identity_aware_proxy_clients`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListIdentityAwareProxyClients`` requests and continue to iterate + through the ``identity_aware_proxy_clients`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iap_v1.types.ListIdentityAwareProxyClientsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListIdentityAwareProxyClientsResponse], + request: service.ListIdentityAwareProxyClientsRequest, + response: service.ListIdentityAwareProxyClientsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iap_v1.types.ListIdentityAwareProxyClientsRequest): + The initial request object. + response (google.cloud.iap_v1.types.ListIdentityAwareProxyClientsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListIdentityAwareProxyClientsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListIdentityAwareProxyClientsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[service.IdentityAwareProxyClient]: + for page in self.pages: + yield from page.identity_aware_proxy_clients + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListIdentityAwareProxyClientsAsyncPager: + """A pager for iterating through ``list_identity_aware_proxy_clients`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iap_v1.types.ListIdentityAwareProxyClientsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``identity_aware_proxy_clients`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListIdentityAwareProxyClients`` requests and continue to iterate + through the ``identity_aware_proxy_clients`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iap_v1.types.ListIdentityAwareProxyClientsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
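+
+    For example (an illustrative sketch only; it assumes ``client`` is an
+    ``IdentityAwareProxyOAuthServiceAsyncClient``, ``request`` is a
+    ``ListIdentityAwareProxyClientsRequest``, and the code runs inside an
+    async context):
+
+    .. code-block:: python
+
+        pager = await client.list_identity_aware_proxy_clients(request=request)
+        async for iap_client in pager:
+            print(iap_client.name)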
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListIdentityAwareProxyClientsResponse]], + request: service.ListIdentityAwareProxyClientsRequest, + response: service.ListIdentityAwareProxyClientsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iap_v1.types.ListIdentityAwareProxyClientsRequest): + The initial request object. + response (google.cloud.iap_v1.types.ListIdentityAwareProxyClientsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListIdentityAwareProxyClientsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[service.ListIdentityAwareProxyClientsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[service.IdentityAwareProxyClient]: + async def async_generator(): + async for page in self.pages: + for response in page.identity_aware_proxy_clients: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/__init__.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/__init__.py new file mode 100644 index 000000000000..e30ecbe86563 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import IdentityAwareProxyOAuthServiceTransport +from .grpc import IdentityAwareProxyOAuthServiceGrpcTransport +from .grpc_asyncio import IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport +from .rest import ( + IdentityAwareProxyOAuthServiceRestInterceptor, + IdentityAwareProxyOAuthServiceRestTransport, +) + +# Compile a registry of transports. 
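+#
+# For example (illustrative only), constructing the client with
+# transport="rest" is resolved through this registry, roughly as:
+#
+#   transport_cls = _transport_registry["rest"]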
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[IdentityAwareProxyOAuthServiceTransport]] +_transport_registry["grpc"] = IdentityAwareProxyOAuthServiceGrpcTransport +_transport_registry["grpc_asyncio"] = IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport +_transport_registry["rest"] = IdentityAwareProxyOAuthServiceRestTransport + +__all__ = ( + "IdentityAwareProxyOAuthServiceTransport", + "IdentityAwareProxyOAuthServiceGrpcTransport", + "IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport", + "IdentityAwareProxyOAuthServiceRestTransport", + "IdentityAwareProxyOAuthServiceRestInterceptor", +) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/base.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/base.py new file mode 100644 index 000000000000..ccce69f47bf9 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/base.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.iap_v1 import gapic_version as package_version +from google.cloud.iap_v1.types import service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class IdentityAwareProxyOAuthServiceTransport(abc.ABC): + """Abstract transport class for IdentityAwareProxyOAuthService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "iap.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_brands: gapic_v1.method.wrap_method( + self.list_brands, + default_timeout=None, + client_info=client_info, + ), + self.create_brand: gapic_v1.method.wrap_method( + self.create_brand, + default_timeout=None, + client_info=client_info, + ), + self.get_brand: gapic_v1.method.wrap_method( + self.get_brand, + default_timeout=None, + client_info=client_info, + ), + self.create_identity_aware_proxy_client: gapic_v1.method.wrap_method( + self.create_identity_aware_proxy_client, + default_timeout=None, + client_info=client_info, + ), + self.list_identity_aware_proxy_clients: gapic_v1.method.wrap_method( + self.list_identity_aware_proxy_clients, + default_timeout=None, + client_info=client_info, + ), + self.get_identity_aware_proxy_client: gapic_v1.method.wrap_method( + self.get_identity_aware_proxy_client, + default_timeout=None, + client_info=client_info, + ), + self.reset_identity_aware_proxy_client_secret: gapic_v1.method.wrap_method( + self.reset_identity_aware_proxy_client_secret, + default_timeout=None, + client_info=client_info, + ), + self.delete_identity_aware_proxy_client: gapic_v1.method.wrap_method( + self.delete_identity_aware_proxy_client, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
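+
+        A safer pattern (an illustrative sketch only; the parent path is
+        hypothetical) is to let a client that owns its transport close it
+        through its context manager:
+
+        .. code-block:: python
+
+            with iap_v1.IdentityAwareProxyOAuthServiceClient() as client:
+                client.list_brands(request={"parent": "projects/123"})
+            # Exiting the block calls transport.close() automatically.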
+ """ + raise NotImplementedError() + + @property + def list_brands( + self, + ) -> Callable[ + [service.ListBrandsRequest], + Union[service.ListBrandsResponse, Awaitable[service.ListBrandsResponse]], + ]: + raise NotImplementedError() + + @property + def create_brand( + self, + ) -> Callable[ + [service.CreateBrandRequest], Union[service.Brand, Awaitable[service.Brand]] + ]: + raise NotImplementedError() + + @property + def get_brand( + self, + ) -> Callable[ + [service.GetBrandRequest], Union[service.Brand, Awaitable[service.Brand]] + ]: + raise NotImplementedError() + + @property + def create_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.CreateIdentityAwareProxyClientRequest], + Union[ + service.IdentityAwareProxyClient, + Awaitable[service.IdentityAwareProxyClient], + ], + ]: + raise NotImplementedError() + + @property + def list_identity_aware_proxy_clients( + self, + ) -> Callable[ + [service.ListIdentityAwareProxyClientsRequest], + Union[ + service.ListIdentityAwareProxyClientsResponse, + Awaitable[service.ListIdentityAwareProxyClientsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.GetIdentityAwareProxyClientRequest], + Union[ + service.IdentityAwareProxyClient, + Awaitable[service.IdentityAwareProxyClient], + ], + ]: + raise NotImplementedError() + + @property + def reset_identity_aware_proxy_client_secret( + self, + ) -> Callable[ + [service.ResetIdentityAwareProxyClientSecretRequest], + Union[ + service.IdentityAwareProxyClient, + Awaitable[service.IdentityAwareProxyClient], + ], + ]: + raise NotImplementedError() + + @property + def delete_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.DeleteIdentityAwareProxyClientRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("IdentityAwareProxyOAuthServiceTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/grpc.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/grpc.py new file mode 100644 index 000000000000..4e4d927c08fd --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/grpc.py @@ -0,0 +1,492 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
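+#
+# Illustrative sketch only: this transport can be selected explicitly by name
+# when constructing the client (assumes Application Default Credentials):
+#
+#   from google.cloud import iap_v1
+#
+#   client = iap_v1.IdentityAwareProxyOAuthServiceClient(transport="grpc")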
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.iap_v1.types import service + +from .base import DEFAULT_CLIENT_INFO, IdentityAwareProxyOAuthServiceTransport + + +class IdentityAwareProxyOAuthServiceGrpcTransport( + IdentityAwareProxyOAuthServiceTransport +): + """gRPC backend transport for IdentityAwareProxyOAuthService. + + API to programmatically create, list and retrieve Identity + Aware Proxy (IAP) OAuth brands; and create, retrieve, delete and + reset-secret of IAP OAuth clients. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "iap.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format.
It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "iap.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_brands( + self, + ) -> Callable[[service.ListBrandsRequest], service.ListBrandsResponse]: + r"""Return a callable for the list brands method over gRPC. + + Lists the existing brands for the project. + + Returns: + Callable[[~.ListBrandsRequest], + ~.ListBrandsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_brands" not in self._stubs: + self._stubs["list_brands"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/ListBrands", + request_serializer=service.ListBrandsRequest.serialize, + response_deserializer=service.ListBrandsResponse.deserialize, + ) + return self._stubs["list_brands"] + + @property + def create_brand(self) -> Callable[[service.CreateBrandRequest], service.Brand]: + r"""Return a callable for the create brand method over gRPC. + + Constructs a new OAuth brand for the project if one + does not exist. The created brand is "internal only", + meaning that OAuth clients created under it only accept + requests from users who belong to the same Google + Workspace organization as the project. The brand is + created in an un-reviewed status. NOTE: The "internal + only" status can be manually changed in the Google Cloud + Console. Requires that a brand does not already exist + for the project, and that the specified support email is + owned by the caller. + + Returns: + Callable[[~.CreateBrandRequest], + ~.Brand]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
+ if "create_brand" not in self._stubs: + self._stubs["create_brand"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/CreateBrand", + request_serializer=service.CreateBrandRequest.serialize, + response_deserializer=service.Brand.deserialize, + ) + return self._stubs["create_brand"] + + @property + def get_brand(self) -> Callable[[service.GetBrandRequest], service.Brand]: + r"""Return a callable for the get brand method over gRPC. + + Retrieves the OAuth brand of the project. + + Returns: + Callable[[~.GetBrandRequest], + ~.Brand]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_brand" not in self._stubs: + self._stubs["get_brand"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/GetBrand", + request_serializer=service.GetBrandRequest.serialize, + response_deserializer=service.Brand.deserialize, + ) + return self._stubs["get_brand"] + + @property + def create_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.CreateIdentityAwareProxyClientRequest], + service.IdentityAwareProxyClient, + ]: + r"""Return a callable for the create identity aware proxy + client method over gRPC. + + Creates an Identity Aware Proxy (IAP) OAuth client. + The client is owned by IAP. Requires that the brand for + the project exists and that it is set for internal-only + use. + + Returns: + Callable[[~.CreateIdentityAwareProxyClientRequest], + ~.IdentityAwareProxyClient]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_identity_aware_proxy_client" not in self._stubs: + self._stubs[ + "create_identity_aware_proxy_client" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/CreateIdentityAwareProxyClient", + request_serializer=service.CreateIdentityAwareProxyClientRequest.serialize, + response_deserializer=service.IdentityAwareProxyClient.deserialize, + ) + return self._stubs["create_identity_aware_proxy_client"] + + @property + def list_identity_aware_proxy_clients( + self, + ) -> Callable[ + [service.ListIdentityAwareProxyClientsRequest], + service.ListIdentityAwareProxyClientsResponse, + ]: + r"""Return a callable for the list identity aware proxy + clients method over gRPC. + + Lists the existing clients for the brand. + + Returns: + Callable[[~.ListIdentityAwareProxyClientsRequest], + ~.ListIdentityAwareProxyClientsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_identity_aware_proxy_clients" not in self._stubs: + self._stubs[ + "list_identity_aware_proxy_clients" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/ListIdentityAwareProxyClients", + request_serializer=service.ListIdentityAwareProxyClientsRequest.serialize, + response_deserializer=service.ListIdentityAwareProxyClientsResponse.deserialize, + ) + return self._stubs["list_identity_aware_proxy_clients"] + + @property + def get_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.GetIdentityAwareProxyClientRequest], service.IdentityAwareProxyClient + ]: + r"""Return a callable for the get identity aware proxy + client method over gRPC. + + Retrieves an Identity Aware Proxy (IAP) OAuth client. + Requires that the client is owned by IAP. + + Returns: + Callable[[~.GetIdentityAwareProxyClientRequest], + ~.IdentityAwareProxyClient]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_identity_aware_proxy_client" not in self._stubs: + self._stubs[ + "get_identity_aware_proxy_client" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/GetIdentityAwareProxyClient", + request_serializer=service.GetIdentityAwareProxyClientRequest.serialize, + response_deserializer=service.IdentityAwareProxyClient.deserialize, + ) + return self._stubs["get_identity_aware_proxy_client"] + + @property + def reset_identity_aware_proxy_client_secret( + self, + ) -> Callable[ + [service.ResetIdentityAwareProxyClientSecretRequest], + service.IdentityAwareProxyClient, + ]: + r"""Return a callable for the reset identity aware proxy + client secret method over gRPC. + + Resets an Identity Aware Proxy (IAP) OAuth client + secret. Useful if the secret was compromised. Requires + that the client is owned by IAP. + + Returns: + Callable[[~.ResetIdentityAwareProxyClientSecretRequest], + ~.IdentityAwareProxyClient]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_identity_aware_proxy_client_secret" not in self._stubs: + self._stubs[ + "reset_identity_aware_proxy_client_secret" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/ResetIdentityAwareProxyClientSecret", + request_serializer=service.ResetIdentityAwareProxyClientSecretRequest.serialize, + response_deserializer=service.IdentityAwareProxyClient.deserialize, + ) + return self._stubs["reset_identity_aware_proxy_client_secret"] + + @property + def delete_identity_aware_proxy_client( + self, + ) -> Callable[[service.DeleteIdentityAwareProxyClientRequest], empty_pb2.Empty]: + r"""Return a callable for the delete identity aware proxy + client method over gRPC. + + Deletes an Identity Aware Proxy (IAP) OAuth client. + Useful for removing obsolete clients, managing the + number of clients in a given project, and cleaning up + after tests. Requires that the client is owned by IAP. + + Returns: + Callable[[~.DeleteIdentityAwareProxyClientRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_identity_aware_proxy_client" not in self._stubs: + self._stubs[ + "delete_identity_aware_proxy_client" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/DeleteIdentityAwareProxyClient", + request_serializer=service.DeleteIdentityAwareProxyClientRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_identity_aware_proxy_client"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("IdentityAwareProxyOAuthServiceGrpcTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/grpc_asyncio.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..139d3cb3a721 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/grpc_asyncio.py @@ -0,0 +1,498 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.iap_v1.types import service + +from .base import DEFAULT_CLIENT_INFO, IdentityAwareProxyOAuthServiceTransport +from .grpc import IdentityAwareProxyOAuthServiceGrpcTransport + + +class IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport( + IdentityAwareProxyOAuthServiceTransport +): + """gRPC AsyncIO backend transport for IdentityAwareProxyOAuthService. + + API to programmatically create, list and retrieve Identity + Aware Proxy (IAP) OAuth brands; and create, retrieve, delete and + reset-secret of IAP OAuth clients. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "iap.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "iap.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache.
+ return self._grpc_channel + + @property + def list_brands( + self, + ) -> Callable[[service.ListBrandsRequest], Awaitable[service.ListBrandsResponse]]: + r"""Return a callable for the list brands method over gRPC. + + Lists the existing brands for the project. + + Returns: + Callable[[~.ListBrandsRequest], + Awaitable[~.ListBrandsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_brands" not in self._stubs: + self._stubs["list_brands"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/ListBrands", + request_serializer=service.ListBrandsRequest.serialize, + response_deserializer=service.ListBrandsResponse.deserialize, + ) + return self._stubs["list_brands"] + + @property + def create_brand( + self, + ) -> Callable[[service.CreateBrandRequest], Awaitable[service.Brand]]: + r"""Return a callable for the create brand method over gRPC. + + Constructs a new OAuth brand for the project if one + does not exist. The created brand is "internal only", + meaning that OAuth clients created under it only accept + requests from users who belong to the same Google + Workspace organization as the project. The brand is + created in an un-reviewed status. NOTE: The "internal + only" status can be manually changed in the Google Cloud + Console. Requires that a brand does not already exist + for the project, and that the specified support email is + owned by the caller. + + Returns: + Callable[[~.CreateBrandRequest], + Awaitable[~.Brand]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_brand" not in self._stubs: + self._stubs["create_brand"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/CreateBrand", + request_serializer=service.CreateBrandRequest.serialize, + response_deserializer=service.Brand.deserialize, + ) + return self._stubs["create_brand"] + + @property + def get_brand( + self, + ) -> Callable[[service.GetBrandRequest], Awaitable[service.Brand]]: + r"""Return a callable for the get brand method over gRPC. + + Retrieves the OAuth brand of the project. + + Returns: + Callable[[~.GetBrandRequest], + Awaitable[~.Brand]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_brand" not in self._stubs: + self._stubs["get_brand"] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/GetBrand", + request_serializer=service.GetBrandRequest.serialize, + response_deserializer=service.Brand.deserialize, + ) + return self._stubs["get_brand"] + + @property + def create_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.CreateIdentityAwareProxyClientRequest], + Awaitable[service.IdentityAwareProxyClient], + ]: + r"""Return a callable for the create identity aware proxy + client method over gRPC. + + Creates an Identity Aware Proxy (IAP) OAuth client. + The client is owned by IAP. 
Requires that the brand for + the project exists and that it is set for internal-only + use. + + Returns: + Callable[[~.CreateIdentityAwareProxyClientRequest], + Awaitable[~.IdentityAwareProxyClient]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_identity_aware_proxy_client" not in self._stubs: + self._stubs[ + "create_identity_aware_proxy_client" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/CreateIdentityAwareProxyClient", + request_serializer=service.CreateIdentityAwareProxyClientRequest.serialize, + response_deserializer=service.IdentityAwareProxyClient.deserialize, + ) + return self._stubs["create_identity_aware_proxy_client"] + + @property + def list_identity_aware_proxy_clients( + self, + ) -> Callable[ + [service.ListIdentityAwareProxyClientsRequest], + Awaitable[service.ListIdentityAwareProxyClientsResponse], + ]: + r"""Return a callable for the list identity aware proxy + clients method over gRPC. + + Lists the existing clients for the brand. + + Returns: + Callable[[~.ListIdentityAwareProxyClientsRequest], + Awaitable[~.ListIdentityAwareProxyClientsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_identity_aware_proxy_clients" not in self._stubs: + self._stubs[ + "list_identity_aware_proxy_clients" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/ListIdentityAwareProxyClients", + request_serializer=service.ListIdentityAwareProxyClientsRequest.serialize, + response_deserializer=service.ListIdentityAwareProxyClientsResponse.deserialize, + ) + return self._stubs["list_identity_aware_proxy_clients"] + + @property + def get_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.GetIdentityAwareProxyClientRequest], + Awaitable[service.IdentityAwareProxyClient], + ]: + r"""Return a callable for the get identity aware proxy + client method over gRPC. + + Retrieves an Identity Aware Proxy (IAP) OAuth client. + Requires that the client is owned by IAP. + + Returns: + Callable[[~.GetIdentityAwareProxyClientRequest], + Awaitable[~.IdentityAwareProxyClient]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_identity_aware_proxy_client" not in self._stubs: + self._stubs[ + "get_identity_aware_proxy_client" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/GetIdentityAwareProxyClient", + request_serializer=service.GetIdentityAwareProxyClientRequest.serialize, + response_deserializer=service.IdentityAwareProxyClient.deserialize, + ) + return self._stubs["get_identity_aware_proxy_client"] + + @property + def reset_identity_aware_proxy_client_secret( + self, + ) -> Callable[ + [service.ResetIdentityAwareProxyClientSecretRequest], + Awaitable[service.IdentityAwareProxyClient], + ]: + r"""Return a callable for the reset identity aware proxy + client secret method over gRPC. + + Resets an Identity Aware Proxy (IAP) OAuth client + secret. Useful if the secret was compromised. Requires + that the client is owned by IAP. + + Returns: + Callable[[~.ResetIdentityAwareProxyClientSecretRequest], + Awaitable[~.IdentityAwareProxyClient]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_identity_aware_proxy_client_secret" not in self._stubs: + self._stubs[ + "reset_identity_aware_proxy_client_secret" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/ResetIdentityAwareProxyClientSecret", + request_serializer=service.ResetIdentityAwareProxyClientSecretRequest.serialize, + response_deserializer=service.IdentityAwareProxyClient.deserialize, + ) + return self._stubs["reset_identity_aware_proxy_client_secret"] + + @property + def delete_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.DeleteIdentityAwareProxyClientRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete identity aware proxy + client method over gRPC. + + Deletes an Identity Aware Proxy (IAP) OAuth client. + Useful for removing obsolete clients, managing the + number of clients in a given project, and cleaning up + after tests. Requires that the client is owned by IAP. + + Returns: + Callable[[~.DeleteIdentityAwareProxyClientRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_identity_aware_proxy_client" not in self._stubs: + self._stubs[ + "delete_identity_aware_proxy_client" + ] = self.grpc_channel.unary_unary( + "/google.cloud.iap.v1.IdentityAwareProxyOAuthService/DeleteIdentityAwareProxyClient", + request_serializer=service.DeleteIdentityAwareProxyClientRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_identity_aware_proxy_client"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/rest.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/rest.py new file mode 100644 index 000000000000..2a8f6649f6ef --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/rest.py @@ -0,0 +1,1231 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.iap_v1.types import service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import IdentityAwareProxyOAuthServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class IdentityAwareProxyOAuthServiceRestInterceptor: + """Interceptor for IdentityAwareProxyOAuthService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the IdentityAwareProxyOAuthServiceRestTransport. + + .. 
code-block:: python + class MyCustomIdentityAwareProxyOAuthServiceInterceptor(IdentityAwareProxyOAuthServiceRestInterceptor): + def pre_create_brand(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_brand(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_identity_aware_proxy_client(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_identity_aware_proxy_client(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_identity_aware_proxy_client(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_brand(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_brand(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_identity_aware_proxy_client(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_identity_aware_proxy_client(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_brands(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_brands(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_identity_aware_proxy_clients(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_identity_aware_proxy_clients(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reset_identity_aware_proxy_client_secret(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reset_identity_aware_proxy_client_secret(self, response): + logging.log(f"Received response: {response}") + return response + + transport = IdentityAwareProxyOAuthServiceRestTransport(interceptor=MyCustomIdentityAwareProxyOAuthServiceInterceptor()) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + + + """ + + def pre_create_brand( + self, request: service.CreateBrandRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateBrandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_brand + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. + """ + return request, metadata + + def post_create_brand(self, response: service.Brand) -> service.Brand: + """Post-rpc interceptor for create_brand + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyOAuthService server but before + it is returned to user code. + """ + return response + + def pre_create_identity_aware_proxy_client( + self, + request: service.CreateIdentityAwareProxyClientRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + service.CreateIdentityAwareProxyClientRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_identity_aware_proxy_client + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. 
+ """ + return request, metadata + + def post_create_identity_aware_proxy_client( + self, response: service.IdentityAwareProxyClient + ) -> service.IdentityAwareProxyClient: + """Post-rpc interceptor for create_identity_aware_proxy_client + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyOAuthService server but before + it is returned to user code. + """ + return response + + def pre_delete_identity_aware_proxy_client( + self, + request: service.DeleteIdentityAwareProxyClientRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + service.DeleteIdentityAwareProxyClientRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_identity_aware_proxy_client + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. + """ + return request, metadata + + def pre_get_brand( + self, request: service.GetBrandRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetBrandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_brand + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. + """ + return request, metadata + + def post_get_brand(self, response: service.Brand) -> service.Brand: + """Post-rpc interceptor for get_brand + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyOAuthService server but before + it is returned to user code. + """ + return response + + def pre_get_identity_aware_proxy_client( + self, + request: service.GetIdentityAwareProxyClientRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetIdentityAwareProxyClientRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_identity_aware_proxy_client + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. + """ + return request, metadata + + def post_get_identity_aware_proxy_client( + self, response: service.IdentityAwareProxyClient + ) -> service.IdentityAwareProxyClient: + """Post-rpc interceptor for get_identity_aware_proxy_client + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyOAuthService server but before + it is returned to user code. + """ + return response + + def pre_list_brands( + self, request: service.ListBrandsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListBrandsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_brands + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. + """ + return request, metadata + + def post_list_brands( + self, response: service.ListBrandsResponse + ) -> service.ListBrandsResponse: + """Post-rpc interceptor for list_brands + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyOAuthService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_identity_aware_proxy_clients( + self, + request: service.ListIdentityAwareProxyClientsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListIdentityAwareProxyClientsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_identity_aware_proxy_clients + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. + """ + return request, metadata + + def post_list_identity_aware_proxy_clients( + self, response: service.ListIdentityAwareProxyClientsResponse + ) -> service.ListIdentityAwareProxyClientsResponse: + """Post-rpc interceptor for list_identity_aware_proxy_clients + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyOAuthService server but before + it is returned to user code. + """ + return response + + def pre_reset_identity_aware_proxy_client_secret( + self, + request: service.ResetIdentityAwareProxyClientSecretRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + service.ResetIdentityAwareProxyClientSecretRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for reset_identity_aware_proxy_client_secret + + Override in a subclass to manipulate the request or metadata + before they are sent to the IdentityAwareProxyOAuthService server. + """ + return request, metadata + + def post_reset_identity_aware_proxy_client_secret( + self, response: service.IdentityAwareProxyClient + ) -> service.IdentityAwareProxyClient: + """Post-rpc interceptor for reset_identity_aware_proxy_client_secret + + Override in a subclass to manipulate the response + after it is returned by the IdentityAwareProxyOAuthService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class IdentityAwareProxyOAuthServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: IdentityAwareProxyOAuthServiceRestInterceptor + + +class IdentityAwareProxyOAuthServiceRestTransport( + IdentityAwareProxyOAuthServiceTransport +): + """REST backend transport for IdentityAwareProxyOAuthService. + + API to programmatically create, list and retrieve Identity + Aware Proxy (IAP) OAuth brands; and create, retrieve, delete and + reset-secret of IAP OAuth clients. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "iap.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[IdentityAwareProxyOAuthServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = ( + interceptor or IdentityAwareProxyOAuthServiceRestInterceptor() + ) + self._prep_wrapped_messages(client_info) + + class _CreateBrand(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("CreateBrand") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateBrandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Brand: + r"""Call the create brand method over HTTP. + + Args: + request (~.service.CreateBrandRequest): + The request object. The request sent to CreateBrand. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.Brand: + OAuth brand data. + NOTE: Only contains a portion of the + data that describes a brand.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/brands", + "body": "brand", + }, + ] + request, metadata = self._interceptor.pre_create_brand(request, metadata) + pb_request = service.CreateBrandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.Brand() + pb_resp = service.Brand.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_brand(resp) + return resp + + class _CreateIdentityAwareProxyClient(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("CreateIdentityAwareProxyClient") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.CreateIdentityAwareProxyClientRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IdentityAwareProxyClient: + r"""Call the create identity aware + proxy client method over HTTP. + + Args: + request (~.service.CreateIdentityAwareProxyClientRequest): + The request object. The request sent to + CreateIdentityAwareProxyClient. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/brands/*}/identityAwareProxyClients", + "body": "identity_aware_proxy_client", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_create_identity_aware_proxy_client( + request, metadata + ) + pb_request = service.CreateIdentityAwareProxyClientRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.IdentityAwareProxyClient() + pb_resp = service.IdentityAwareProxyClient.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_identity_aware_proxy_client(resp) + return resp + + class _DeleteIdentityAwareProxyClient(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("DeleteIdentityAwareProxyClient") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.DeleteIdentityAwareProxyClientRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete identity aware + proxy client method over HTTP. + + Args: + request (~.service.DeleteIdentityAwareProxyClientRequest): + The request object. The request sent to + DeleteIdentityAwareProxyClient. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/brands/*/identityAwareProxyClients/*}", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_delete_identity_aware_proxy_client( + request, metadata + ) + pb_request = service.DeleteIdentityAwareProxyClientRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetBrand(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("GetBrand") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetBrandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Brand: + r"""Call the get brand method over HTTP. + + Args: + request (~.service.GetBrandRequest): + The request object. The request sent to GetBrand. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.Brand: + OAuth brand data. + NOTE: Only contains a portion of the + data that describes a brand. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/brands/*}", + }, + ] + request, metadata = self._interceptor.pre_get_brand(request, metadata) + pb_request = service.GetBrandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
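+            # An illustrative sketch of handling such an error at the call
+            # site (this assumes the public client surface, not this private
+            # stub; ``client`` and ``request`` are placeholders):
+            #
+            #     from google.api_core import exceptions as core_exceptions
+            #
+            #     try:
+            #         brand = client.get_brand(request=request)
+            #     except core_exceptions.GoogleAPICallError as exc:
+            #         print(exc.code, exc.message)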
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.Brand() + pb_resp = service.Brand.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_brand(resp) + return resp + + class _GetIdentityAwareProxyClient(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("GetIdentityAwareProxyClient") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetIdentityAwareProxyClientRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IdentityAwareProxyClient: + r"""Call the get identity aware proxy + client method over HTTP. + + Args: + request (~.service.GetIdentityAwareProxyClientRequest): + The request object. The request sent to + GetIdentityAwareProxyClient. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/brands/*/identityAwareProxyClients/*}", + }, + ] + request, metadata = self._interceptor.pre_get_identity_aware_proxy_client( + request, metadata + ) + pb_request = service.GetIdentityAwareProxyClientRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.IdentityAwareProxyClient() + pb_resp = service.IdentityAwareProxyClient.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_identity_aware_proxy_client(resp) + return resp + + class _ListBrands(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("ListBrands") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListBrandsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListBrandsResponse: + r"""Call the list brands method over HTTP. + + Args: + request (~.service.ListBrandsRequest): + The request object. The request sent to ListBrands. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListBrandsResponse: + Response message for ListBrands. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/brands", + }, + ] + request, metadata = self._interceptor.pre_list_brands(request, metadata) + pb_request = service.ListBrandsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListBrandsResponse() + pb_resp = service.ListBrandsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_brands(resp) + return resp + + class _ListIdentityAwareProxyClients(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("ListIdentityAwareProxyClients") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListIdentityAwareProxyClientsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListIdentityAwareProxyClientsResponse: + r"""Call the list identity aware proxy + clients method over HTTP. + + Args: + request (~.service.ListIdentityAwareProxyClientsRequest): + The request object. The request sent to + ListIdentityAwareProxyClients. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListIdentityAwareProxyClientsResponse: + Response message for + ListIdentityAwareProxyClients. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/brands/*}/identityAwareProxyClients", + }, + ] + request, metadata = self._interceptor.pre_list_identity_aware_proxy_clients( + request, metadata + ) + pb_request = service.ListIdentityAwareProxyClientsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListIdentityAwareProxyClientsResponse() + pb_resp = service.ListIdentityAwareProxyClientsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_identity_aware_proxy_clients(resp) + return resp + + class _ResetIdentityAwareProxyClientSecret(IdentityAwareProxyOAuthServiceRestStub): + def __hash__(self): + return hash("ResetIdentityAwareProxyClientSecret") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ResetIdentityAwareProxyClientSecretRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IdentityAwareProxyClient: + r"""Call the reset identity aware + proxy client secret method over HTTP. + + Args: + request (~.service.ResetIdentityAwareProxyClientSecretRequest): + The request object. The request sent to + ResetIdentityAwareProxyClientSecret. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.IdentityAwareProxyClient: + Contains the data that describes an + Identity Aware Proxy owned client. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/brands/*/identityAwareProxyClients/*}:resetSecret", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_reset_identity_aware_proxy_client_secret( + request, metadata + ) + pb_request = service.ResetIdentityAwareProxyClientSecretRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.IdentityAwareProxyClient() + pb_resp = service.IdentityAwareProxyClient.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reset_identity_aware_proxy_client_secret(resp) + return resp + + @property + def create_brand(self) -> Callable[[service.CreateBrandRequest], service.Brand]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBrand(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.CreateIdentityAwareProxyClientRequest], + service.IdentityAwareProxyClient, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateIdentityAwareProxyClient(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_identity_aware_proxy_client( + self, + ) -> Callable[[service.DeleteIdentityAwareProxyClientRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteIdentityAwareProxyClient(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_brand(self) -> Callable[[service.GetBrandRequest], service.Brand]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBrand(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_identity_aware_proxy_client( + self, + ) -> Callable[ + [service.GetIdentityAwareProxyClientRequest], service.IdentityAwareProxyClient + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIdentityAwareProxyClient(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_brands( + self, + ) -> Callable[[service.ListBrandsRequest], service.ListBrandsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBrands(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_identity_aware_proxy_clients( + self, + ) -> Callable[ + [service.ListIdentityAwareProxyClientsRequest], + service.ListIdentityAwareProxyClientsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListIdentityAwareProxyClients(self._session, self._host, self._interceptor) # type: ignore + + @property + def reset_identity_aware_proxy_client_secret( + self, + ) -> Callable[ + [service.ResetIdentityAwareProxyClientSecretRequest], + service.IdentityAwareProxyClient, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ResetIdentityAwareProxyClientSecret(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("IdentityAwareProxyOAuthServiceRestTransport",) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/types/__init__.py b/packages/google-cloud-iap/google/cloud/iap_v1/types/__init__.py new file mode 100644 index 000000000000..c7006a4f91a3 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/types/__init__.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .service import ( + AccessDeniedPageSettings, + AccessSettings, + AllowedDomainsSettings, + ApplicationSettings, + AttributePropagationSettings, + Brand, + CorsSettings, + CreateBrandRequest, + CreateIdentityAwareProxyClientRequest, + CreateTunnelDestGroupRequest, + CsmSettings, + DeleteIdentityAwareProxyClientRequest, + DeleteTunnelDestGroupRequest, + GcipSettings, + GetBrandRequest, + GetIapSettingsRequest, + GetIdentityAwareProxyClientRequest, + GetTunnelDestGroupRequest, + IapSettings, + IdentityAwareProxyClient, + ListBrandsRequest, + ListBrandsResponse, + ListIdentityAwareProxyClientsRequest, + ListIdentityAwareProxyClientsResponse, + ListTunnelDestGroupsRequest, + ListTunnelDestGroupsResponse, + OAuthSettings, + ReauthSettings, + ResetIdentityAwareProxyClientSecretRequest, + TunnelDestGroup, + UpdateIapSettingsRequest, + UpdateTunnelDestGroupRequest, +) + +__all__ = ( + "AccessDeniedPageSettings", + "AccessSettings", + "AllowedDomainsSettings", + "ApplicationSettings", + "AttributePropagationSettings", + "Brand", + "CorsSettings", + "CreateBrandRequest", + "CreateIdentityAwareProxyClientRequest", + "CreateTunnelDestGroupRequest", + "CsmSettings", + "DeleteIdentityAwareProxyClientRequest", + "DeleteTunnelDestGroupRequest", + "GcipSettings", + "GetBrandRequest", + "GetIapSettingsRequest", + "GetIdentityAwareProxyClientRequest", + "GetTunnelDestGroupRequest", + "IapSettings", + "IdentityAwareProxyClient", + "ListBrandsRequest", + "ListBrandsResponse", + "ListIdentityAwareProxyClientsRequest", + "ListIdentityAwareProxyClientsResponse", + "ListTunnelDestGroupsRequest", + "ListTunnelDestGroupsResponse", + "OAuthSettings", + "ReauthSettings", + "ResetIdentityAwareProxyClientSecretRequest", + "TunnelDestGroup", + "UpdateIapSettingsRequest", + "UpdateTunnelDestGroupRequest", +) diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/types/service.py b/packages/google-cloud-iap/google/cloud/iap_v1/types/service.py new file mode 100644 index 000000000000..c4357bcaa077 --- /dev/null +++ b/packages/google-cloud-iap/google/cloud/iap_v1/types/service.py @@ -0,0 +1,1030 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.iap.v1", + manifest={ + "ListTunnelDestGroupsRequest", + "ListTunnelDestGroupsResponse", + "CreateTunnelDestGroupRequest", + "GetTunnelDestGroupRequest", + "DeleteTunnelDestGroupRequest", + "UpdateTunnelDestGroupRequest", + "TunnelDestGroup", + "GetIapSettingsRequest", + "UpdateIapSettingsRequest", + "IapSettings", + "AccessSettings", + "GcipSettings", + "CorsSettings", + "OAuthSettings", + "ReauthSettings", + "AllowedDomainsSettings", + "ApplicationSettings", + "CsmSettings", + "AccessDeniedPageSettings", + "AttributePropagationSettings", + "ListBrandsRequest", + "ListBrandsResponse", + "CreateBrandRequest", + "GetBrandRequest", + "ListIdentityAwareProxyClientsRequest", + "ListIdentityAwareProxyClientsResponse", + "CreateIdentityAwareProxyClientRequest", + "GetIdentityAwareProxyClientRequest", + "ResetIdentityAwareProxyClientSecretRequest", + "DeleteIdentityAwareProxyClientRequest", + "Brand", + "IdentityAwareProxyClient", + }, +) + + +class ListTunnelDestGroupsRequest(proto.Message): + r"""The request to ListTunnelDestGroups. + + Attributes: + parent (str): + Required. Google Cloud Project ID and location. In the + following format: + ``projects/{project_number/id}/iap_tunnel/locations/{location}``. + A ``-`` can be used for the location to group across all + locations. + page_size (int): + The maximum number of groups to return. The + service might return fewer than this value. + If unspecified, at most 100 groups are returned. + The maximum value is 1000; values above 1000 are + coerced to 1000. + page_token (str): + A page token, received from a previous + ``ListTunnelDestGroups`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListTunnelDestGroups`` must match the call that provided + the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTunnelDestGroupsResponse(proto.Message): + r"""The response from ListTunnelDestGroups. + + Attributes: + tunnel_dest_groups (MutableSequence[google.cloud.iap_v1.types.TunnelDestGroup]): + TunnelDestGroup existing in the project. + next_page_token (str): + A token that you can send as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + tunnel_dest_groups: MutableSequence["TunnelDestGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TunnelDestGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateTunnelDestGroupRequest(proto.Message): + r"""The request to CreateTunnelDestGroup. 
+
+    Attributes:
+        parent (str):
+            Required. Google Cloud Project ID and location. In the
+            following format:
+            ``projects/{project_number/id}/iap_tunnel/locations/{location}``.
+        tunnel_dest_group (google.cloud.iap_v1.types.TunnelDestGroup):
+            Required. The TunnelDestGroup to create.
+        tunnel_dest_group_id (str):
+            Required. The ID to use for the TunnelDestGroup, which
+            becomes the final component of the resource name.
+
+            This value must be 4-63 characters, and valid characters are
+            ``[a-z]-``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    tunnel_dest_group: "TunnelDestGroup" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="TunnelDestGroup",
+    )
+    tunnel_dest_group_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class GetTunnelDestGroupRequest(proto.Message):
+    r"""The request to GetTunnelDestGroup.
+
+    Attributes:
+        name (str):
+            Required. Name of the TunnelDestGroup to be fetched. In the
+            following format:
+            ``projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class DeleteTunnelDestGroupRequest(proto.Message):
+    r"""The request to DeleteTunnelDestGroup.
+
+    Attributes:
+        name (str):
+            Required. Name of the TunnelDestGroup to delete. In the
+            following format:
+            ``projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UpdateTunnelDestGroupRequest(proto.Message):
+    r"""The request to UpdateTunnelDestGroup.
+
+    Attributes:
+        tunnel_dest_group (google.cloud.iap_v1.types.TunnelDestGroup):
+            Required. The new values for the
+            TunnelDestGroup.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            A field mask that specifies which fields of
+            the TunnelDestGroup to update. If omitted, then
+            all of the fields are updated. See
+            https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+    """
+
+    tunnel_dest_group: "TunnelDestGroup" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="TunnelDestGroup",
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class TunnelDestGroup(proto.Message):
+    r"""A TunnelDestGroup.
+
+    Attributes:
+        name (str):
+            Required. Immutable. Identifier for the
+            TunnelDestGroup. Must be unique within the
+            project and contain only lower case letters
+            (a-z) and dashes (-).
+        cidrs (MutableSequence[str]):
+            Unordered list. List of CIDRs that this group
+            applies to.
+        fqdns (MutableSequence[str]):
+            Unordered list. List of FQDNs that this group
+            applies to.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    cidrs: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    fqdns: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class GetIapSettingsRequest(proto.Message):
+    r"""The request sent to GetIapSettings.
+
+    Attributes:
+        name (str):
+            Required. The resource name for which to retrieve the
+            settings. Authorization: Requires the ``getSettings``
+            permission for the associated resource.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UpdateIapSettingsRequest(proto.Message):
+    r"""The request sent to UpdateIapSettings.
+
+    Attributes:
+        iap_settings (google.cloud.iap_v1.types.IapSettings):
+            Required. The new values for the IAP settings to be updated.
+            Authorization: Requires the ``updateSettings`` permission
+            for the associated resource.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            The field mask specifying which IAP settings should be
+            updated. If omitted, then all of the settings are updated.
+            See
+            https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
+
+            Note: All IAP reauth settings must always be set together,
+            using the field mask:
+            ``iapSettings.accessSettings.reauthSettings``.
+    """
+
+    iap_settings: "IapSettings" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="IapSettings",
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class IapSettings(proto.Message):
+    r"""The IAP configurable settings.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the IAP
+            protected resource.
+        access_settings (google.cloud.iap_v1.types.AccessSettings):
+            Top level wrapper for all access related
+            settings in IAP
+        application_settings (google.cloud.iap_v1.types.ApplicationSettings):
+            Top level wrapper for all application related
+            settings in IAP
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    access_settings: "AccessSettings" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message="AccessSettings",
+    )
+    application_settings: "ApplicationSettings" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message="ApplicationSettings",
+    )
+
+
+class AccessSettings(proto.Message):
+    r"""Access related settings for IAP protected apps.
+
+    Attributes:
+        gcip_settings (google.cloud.iap_v1.types.GcipSettings):
+            GCIP claims and endpoint configurations for
+            third-party identity providers.
+        cors_settings (google.cloud.iap_v1.types.CorsSettings):
+            Configuration to allow cross-origin requests
+            via IAP.
+        oauth_settings (google.cloud.iap_v1.types.OAuthSettings):
+            Settings to configure IAP's OAuth behavior.
+        reauth_settings (google.cloud.iap_v1.types.ReauthSettings):
+            Settings to configure reauthentication
+            policies in IAP.
+        allowed_domains_settings (google.cloud.iap_v1.types.AllowedDomainsSettings):
+            Settings to configure and enable allowed
+            domains.
+    """
+
+    gcip_settings: "GcipSettings" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="GcipSettings",
+    )
+    cors_settings: "CorsSettings" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="CorsSettings",
+    )
+    oauth_settings: "OAuthSettings" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="OAuthSettings",
+    )
+    reauth_settings: "ReauthSettings" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message="ReauthSettings",
+    )
+    allowed_domains_settings: "AllowedDomainsSettings" = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message="AllowedDomainsSettings",
+    )
+
+
+class GcipSettings(proto.Message):
+    r"""Allows customers to configure tenant_id for GCIP instance per-app.
+
+    Attributes:
+        tenant_ids (MutableSequence[str]):
+            GCIP tenant ids that are linked to the IAP resource.
+            tenant_ids could be a string beginning with a number
+            character to indicate authenticating with GCIP tenant flow,
+            or in the format of \_ to indicate authenticating with GCIP
+            agent flow. If agent flow is used, tenant_ids should only
+            contain a single element, while for tenant flow,
+            tenant_ids can contain multiple elements.
+        login_page_uri (google.protobuf.wrappers_pb2.StringValue):
+            Login page URI associated with the GCIP
+            tenants. Typically, all resources within the
+            same project share the same login page, though
+            it could be overridden at the sub resource
+            level.
+    """
+
+    tenant_ids: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    login_page_uri: wrappers_pb2.StringValue = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=wrappers_pb2.StringValue,
+    )
+
+
+class CorsSettings(proto.Message):
+    r"""Allows customers to configure HTTP request paths that allow
+    HTTP OPTIONS calls to bypass authentication and authorization.
+
+    Attributes:
+        allow_http_options (google.protobuf.wrappers_pb2.BoolValue):
+            Configuration to allow HTTP OPTIONS calls to
+            skip authorization. If undefined, IAP will not
+            apply any special logic to OPTIONS requests.
+    """
+
+    allow_http_options: wrappers_pb2.BoolValue = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=wrappers_pb2.BoolValue,
+    )
+
+
+class OAuthSettings(proto.Message):
+    r"""Configuration for OAuth login and consent flow behavior as
+    well as for OAuth Credentials.
+
+    Attributes:
+        login_hint (google.protobuf.wrappers_pb2.StringValue):
+            Domain hint to send as hd=? parameter in
+            OAuth request flow. Enables redirect to primary
+            IDP by skipping Google's login screen.
+            https://developers.google.com/identity/protocols/OpenIDConnect#hd-param
+            Note: IAP does not verify that the id token's hd
+            claim matches this value since access behavior
+            is managed by IAM policies.
+    """
+
+    login_hint: wrappers_pb2.StringValue = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=wrappers_pb2.StringValue,
+    )
+
+
+class ReauthSettings(proto.Message):
+    r"""Configuration for IAP reauthentication policies.
+
+    Attributes:
+        method (google.cloud.iap_v1.types.ReauthSettings.Method):
+            Reauth method requested.
+        max_age (google.protobuf.duration_pb2.Duration):
+            Reauth session lifetime, how long before a
+            user has to reauthenticate.
+        policy_type (google.cloud.iap_v1.types.ReauthSettings.PolicyType):
+            How IAP determines the effective policy in
+            cases of hierarchical policies. Policies are
+            merged from higher in the hierarchy to lower in
+            the hierarchy.
+    """
+
+    class Method(proto.Enum):
+        r"""Types of reauthentication methods supported by IAP.
+
+        Values:
+            METHOD_UNSPECIFIED (0):
+                Reauthentication disabled.
+            LOGIN (1):
+                Prompts the user to log in again.
+            PASSWORD (2):
+                No description available.
+            SECURE_KEY (3):
+                User must use their secure key 2nd factor
+                device.
+            ENROLLED_SECOND_FACTORS (4):
+                User can use any enabled 2nd factor.
+        """
+        METHOD_UNSPECIFIED = 0
+        LOGIN = 1
+        PASSWORD = 2
+        SECURE_KEY = 3
+        ENROLLED_SECOND_FACTORS = 4
+
+    class PolicyType(proto.Enum):
+        r"""Type of policy in the case of hierarchical policies.
+
+        Values:
+            POLICY_TYPE_UNSPECIFIED (0):
+                Default value. This value is unused.
+            MINIMUM (1):
+                This policy acts as a minimum to other
+                policies, lower in the hierarchy. Effective
+                policy may only be the same or stricter.
+            DEFAULT (2):
+                This policy acts as a default if no other
+                reauth policy is set.
+        """
+        POLICY_TYPE_UNSPECIFIED = 0
+        MINIMUM = 1
+        DEFAULT = 2
+
+    method: Method = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Method,
+    )
+    max_age: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=duration_pb2.Duration,
+    )
+    policy_type: PolicyType = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=PolicyType,
+    )
+
+
+class AllowedDomainsSettings(proto.Message):
+    r"""Configuration for IAP allowed domains. Lets you restrict
+    access to an app and allow access to only the domains that you
+    list.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        enable (bool):
+            Configuration for customers to opt in for the
+            feature.
+
+            This field is a member of `oneof`_ ``_enable``.
+        domains (MutableSequence[str]):
+            List of trusted domains.
+    """
+
+    enable: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+        optional=True,
+    )
+    domains: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ApplicationSettings(proto.Message):
+    r"""Wrapper over application specific settings for IAP.
+
+    Attributes:
+        csm_settings (google.cloud.iap_v1.types.CsmSettings):
+            Settings to configure IAP's behavior for a
+            service mesh.
+        access_denied_page_settings (google.cloud.iap_v1.types.AccessDeniedPageSettings):
+            Customization for the Access Denied page.
+        cookie_domain (google.protobuf.wrappers_pb2.StringValue):
+            The Domain value to set for cookies generated
+            by IAP. This value is not validated by the API,
+            but will be ignored at runtime if invalid.
+        attribute_propagation_settings (google.cloud.iap_v1.types.AttributePropagationSettings):
+            Settings to configure attribute propagation.
+    """
+
+    csm_settings: "CsmSettings" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="CsmSettings",
+    )
+    access_denied_page_settings: "AccessDeniedPageSettings" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="AccessDeniedPageSettings",
+    )
+    cookie_domain: wrappers_pb2.StringValue = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=wrappers_pb2.StringValue,
+    )
+    attribute_propagation_settings: "AttributePropagationSettings" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="AttributePropagationSettings",
+    )
+
+
+class CsmSettings(proto.Message):
+    r"""Configuration for RCTokens generated for service mesh
+    workloads protected by IAP. RCTokens are IAP-generated JWTs that
+    can be verified at the application. The RCToken is primarily
+    used for service mesh deployments, and can be scoped to a single
+    mesh by configuring the audience field accordingly.
+
+    Attributes:
+        rctoken_aud (google.protobuf.wrappers_pb2.StringValue):
+            Audience claim set in the generated RCToken.
+            This value is not validated by IAP.
+    """
+
+    rctoken_aud: wrappers_pb2.StringValue = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=wrappers_pb2.StringValue,
+    )
+
+
+class AccessDeniedPageSettings(proto.Message):
+    r"""Custom content configuration for the access denied page.
+    IAP allows customers to define a custom URI to use as the error
+    page when access is denied to users. If IAP prevents access to
+    this page, the default IAP error page will be displayed instead.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        access_denied_page_uri (google.protobuf.wrappers_pb2.StringValue):
+            The URI to be redirected to when access is
+            denied.
+        generate_troubleshooting_uri (google.protobuf.wrappers_pb2.BoolValue):
+            Whether to generate a troubleshooting URL on
+            access denied events to this application.
+        remediation_token_generation_enabled (google.protobuf.wrappers_pb2.BoolValue):
+            Whether to generate a remediation token on
+            access denied events to this application.
+
+            This field is a member of `oneof`_ ``_remediation_token_generation_enabled``.
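+
+    Example (an illustrative sketch only; the URI below is a
+    placeholder)::
+
+        from google.protobuf import wrappers_pb2
+
+        from google.cloud import iap_v1
+
+        settings = iap_v1.AccessDeniedPageSettings(
+            access_denied_page_uri=wrappers_pb2.StringValue(
+                value="https://example.com/access-denied"
+            ),
+            generate_troubleshooting_uri=wrappers_pb2.BoolValue(value=True),
+        )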
+    """
+
+    access_denied_page_uri: wrappers_pb2.StringValue = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=wrappers_pb2.StringValue,
+    )
+    generate_troubleshooting_uri: wrappers_pb2.BoolValue = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=wrappers_pb2.BoolValue,
+    )
+    remediation_token_generation_enabled: wrappers_pb2.BoolValue = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        optional=True,
+        message=wrappers_pb2.BoolValue,
+    )
+
+
+class AttributePropagationSettings(proto.Message):
+    r"""Configuration for propagating attributes to applications
+    protected by IAP.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        expression (str):
+            Raw string CEL expression. Must return a list of attributes.
+            A maximum of 45 attributes can be selected. Expressions can
+            select different attribute types from ``attributes``:
+            ``attributes.saml_attributes``,
+            ``attributes.iap_attributes``. The following functions are
+            supported:
+
+            -  filter ``<list>.filter(<iter_var>, <predicate>)``:
+               Returns a subset of ``<list>`` where ``<predicate>`` is
+               true for every item.
+
+            -  in ``<var> in <list>``: Returns true if ``<list>``
+               contains ``<var>``.
+
+            -  selectByName ``<list>.selectByName(<string>)``: Returns
+               the attribute in ``<list>`` with the given ``<string>``
+               name, otherwise returns empty.
+
+            -  emitAs ``<attribute>.emitAs(<string>)``: Sets the
+               ``<attribute>`` name field to the given ``<string>`` for
+               propagation in selected output credentials.
+
+            -  strict ``<attribute>.strict()``: Ignores the
+               ``x-goog-iap-attr-`` prefix for the provided
+               ``<attribute>`` when propagating with the ``HEADER``
+               output credential, such as request headers.
+
+            -  append ``<target_list>.append(<attribute>)`` OR
+               ``<target_list>.append(<string>)``: Appends the provided
+               ``<attribute>`` or ``<string>`` to the end of
+               ``<target_list>``.
+
+            Example expression:
+            ``attributes.saml_attributes.filter(x, x.name in ['test']).append(attributes.iap_attributes.selectByName('exact').emitAs('custom').strict())``
+
+            This field is a member of `oneof`_ ``_expression``.
+        output_credentials (MutableSequence[google.cloud.iap_v1.types.AttributePropagationSettings.OutputCredentials]):
+            Which output credentials the attributes
+            selected by the CEL expression should be
+            propagated in. All attributes will be fully
+            duplicated in each selected output credential.
+        enable (bool):
+            Whether the provided attribute propagation
+            settings should be evaluated on user requests.
+            If set to true, attributes returned from the
+            expression will be propagated in the set output
+            credentials.
+
+            This field is a member of `oneof`_ ``_enable``.
+    """
+
+    class OutputCredentials(proto.Enum):
+        r"""Supported output credentials for attribute propagation. Each
+        output credential maps to a "field" in the response. For
+        example, selecting JWT will propagate all attributes in the IAP
+        JWT, header in the headers, etc.
+
+        Values:
+            OUTPUT_CREDENTIALS_UNSPECIFIED (0):
+                An output credential is required.
+            HEADER (1):
+                Propagate attributes in the headers with
+                "x-goog-iap-attr-" prefix.
+            JWT (2):
+                Propagate attributes in the JWT of the form:
+                ``"additional_claims": { "my_attribute": ["value1", "value2"] }``
+            RCTOKEN (3):
+                Propagate attributes in the RCToken of the form:
+                ``"additional_claims": { "my_attribute": ["value1", "value2"] }``
+        """
+        OUTPUT_CREDENTIALS_UNSPECIFIED = 0
+        HEADER = 1
+        JWT = 2
+        RCTOKEN = 3
+
+    expression: str = proto.Field(
+        proto.STRING,
+        number=1,
+        optional=True,
+    )
+    output_credentials: MutableSequence[OutputCredentials] = proto.RepeatedField(
+        proto.ENUM,
+        number=2,
+        enum=OutputCredentials,
+    )
+    enable: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+        optional=True,
+    )
+
+
+class ListBrandsRequest(proto.Message):
+    r"""The request sent to ListBrands.
+
+    Attributes:
+        parent (str):
+            Required. GCP Project number/id. In the following format:
+            projects/{project_number/id}.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListBrandsResponse(proto.Message):
+    r"""Response message for ListBrands.
+
+    Attributes:
+        brands (MutableSequence[google.cloud.iap_v1.types.Brand]):
+            Brands existing in the project.
+    """
+
+    brands: MutableSequence["Brand"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Brand",
+    )
+
+
+class CreateBrandRequest(proto.Message):
+    r"""The request sent to CreateBrand.
+
+    Attributes:
+        parent (str):
+            Required. GCP Project number/id under which the brand is to
+            be created. In the following format:
+            projects/{project_number/id}.
+        brand (google.cloud.iap_v1.types.Brand):
+            Required. The brand to be created.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    brand: "Brand" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="Brand",
+    )
+
+
+class GetBrandRequest(proto.Message):
+    r"""The request sent to GetBrand.
+
+    Attributes:
+        name (str):
+            Required. Name of the brand to be fetched. In the following
+            format: projects/{project_number/id}/brands/{brand}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListIdentityAwareProxyClientsRequest(proto.Message):
+    r"""The request sent to ListIdentityAwareProxyClients.
+
+    Attributes:
+        parent (str):
+            Required. Full brand path. In the following format:
+            projects/{project_number/id}/brands/{brand}.
+        page_size (int):
+            The maximum number of clients to return. The
+            service may return fewer than this value.
+            If unspecified, at most 100 clients will be
+            returned. The maximum value is 1000; values
+            above 1000 will be coerced to 1000.
+        page_token (str):
+            A page token, received from a previous
+            ``ListIdentityAwareProxyClients`` call. Provide this to
+            retrieve the subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListIdentityAwareProxyClients`` must match the call that
+            provided the page token.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListIdentityAwareProxyClientsResponse(proto.Message):
+    r"""Response message for ListIdentityAwareProxyClients.
+
+    Attributes:
+        identity_aware_proxy_clients (MutableSequence[google.cloud.iap_v1.types.IdentityAwareProxyClient]):
+            Clients existing in the brand.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
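+
+    Example (an illustrative sketch; the pager returned by
+    ``IdentityAwareProxyOAuthServiceClient.list_identity_aware_proxy_clients``
+    consumes ``next_page_token`` automatically, and the parent path is
+    a placeholder)::
+
+        from google.cloud import iap_v1
+
+        client = iap_v1.IdentityAwareProxyOAuthServiceClient()
+        request = iap_v1.ListIdentityAwareProxyClientsRequest(
+            parent="projects/123/brands/123",
+        )
+        for proxy_client in client.list_identity_aware_proxy_clients(
+            request=request
+        ):
+            print(proxy_client.display_name)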
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    identity_aware_proxy_clients: MutableSequence[
+        "IdentityAwareProxyClient"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="IdentityAwareProxyClient",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class CreateIdentityAwareProxyClientRequest(proto.Message):
+    r"""The request sent to CreateIdentityAwareProxyClient.
+
+    Attributes:
+        parent (str):
+            Required. Path to create the client in. In the following
+            format: projects/{project_number/id}/brands/{brand}. The
+            project must belong to a G Suite account.
+        identity_aware_proxy_client (google.cloud.iap_v1.types.IdentityAwareProxyClient):
+            Required. Identity Aware Proxy Client to be
+            created.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    identity_aware_proxy_client: "IdentityAwareProxyClient" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="IdentityAwareProxyClient",
+    )
+
+
+class GetIdentityAwareProxyClientRequest(proto.Message):
+    r"""The request sent to GetIdentityAwareProxyClient.
+
+    Attributes:
+        name (str):
+            Required. Name of the Identity Aware Proxy client to be
+            fetched. In the following format:
+            projects/{project_number/id}/brands/{brand}/identityAwareProxyClients/{client_id}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ResetIdentityAwareProxyClientSecretRequest(proto.Message):
+    r"""The request sent to ResetIdentityAwareProxyClientSecret.
+
+    Attributes:
+        name (str):
+            Required. Name of the Identity Aware Proxy client that
+            will have its secret reset. In the following format:
+            projects/{project_number/id}/brands/{brand}/identityAwareProxyClients/{client_id}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class DeleteIdentityAwareProxyClientRequest(proto.Message):
+    r"""The request sent to DeleteIdentityAwareProxyClient.
+
+    Attributes:
+        name (str):
+            Required. Name of the Identity Aware Proxy client to be
+            deleted. In the following format:
+            projects/{project_number/id}/brands/{brand}/identityAwareProxyClients/{client_id}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class Brand(proto.Message):
+    r"""OAuth brand data.
+    NOTE: Only contains a portion of the data that describes a
+    brand.
+
+    Attributes:
+        name (str):
+            Output only. Identifier of the brand.
+            NOTE: The GCP project number achieves the same
+            brand identification purpose, as only one brand
+            can be created per project.
+        support_email (str):
+            Support email displayed on the OAuth consent
+            screen.
+        application_title (str):
+            Application name displayed on OAuth consent
+            screen.
+        org_internal_only (bool):
+            Output only. Whether the brand is only
+            intended for use inside the G Suite
+            organization.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    support_email: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    application_title: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    org_internal_only: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class IdentityAwareProxyClient(proto.Message):
+    r"""Contains the data that describes an Identity Aware Proxy
+    owned client.
+
+    Attributes:
+        name (str):
+            Output only. Unique identifier of the OAuth
+            client.
+        secret (str):
+            Output only. Client secret of the OAuth
+            client.
+        display_name (str):
+            Human-friendly name given to the OAuth
+            client.
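+
+    Example (an illustrative sketch; the resource name is a
+    placeholder)::
+
+        from google.cloud import iap_v1
+
+        client = iap_v1.IdentityAwareProxyOAuthServiceClient()
+        rotated = client.reset_identity_aware_proxy_client_secret(
+            request=iap_v1.ResetIdentityAwareProxyClientSecretRequest(
+                name="projects/123/brands/123/identityAwareProxyClients/456",
+            )
+        )
+        # ``rotated.secret`` now holds the new client secret.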
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + secret: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-iap/mypy.ini b/packages/google-cloud-iap/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-iap/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-iap/noxfile.py b/packages/google-cloud-iap/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-iap/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
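+    # For example, a CI job could skip these tests with an invocation
+    # like (illustrative, not defined by this file):
+    #   RUN_SYSTEM_TESTS=false nox -s system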
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
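+    # For instance, a constraints file containing (illustrative values):
+    #   google-api-core==1.34.0
+    #   proto-plus==1.22.2
+    # yields constraints_deps == ["google-api-core", "proto-plus"] below.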
+    with open(
+        CURRENT_DIRECTORY
+        / "testing"
+        / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
+    constraints_deps = [
+        match.group(1)
+        for match in re.finditer(
+            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+        )
+    ]
+
+    session.install(*constraints_deps)
+
+    prerel_deps = [
+        "protobuf",
+        # dependency of grpc
+        "six",
+        "googleapis-common-protos",
+        # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-cloud-iap/renovate.json b/packages/google-cloud-iap/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-iap/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-iap/scripts/decrypt-secrets.sh b/packages/google-cloud-iap/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..21f6d2a26d90
--- /dev/null
+++ b/packages/google-cloud-iap/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
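The guard that follows aborts rather than overwrite files a sample author may have prepared by hand. For readers more at home in Python than bash, the same check could be sketched as (illustrative only, not part of the script):

    import os
    import sys

    targets = [
        "testing/test-env.sh",
        "testing/service-account.json",
        "testing/client-secrets.json",
    ]
    if any(os.path.exists(path) for path in targets):
        sys.exit("One or more target files exist, aborting.")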
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-iap/scripts/fixup_iap_v1_keywords.py b/packages/google-cloud-iap/scripts/fixup_iap_v1_keywords.py new file mode 100644 index 000000000000..7359ed1b64c0 --- /dev/null +++ b/packages/google-cloud-iap/scripts/fixup_iap_v1_keywords.py @@ -0,0 +1,193 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class iapCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_brand': ('parent', 'brand', ), + 'create_identity_aware_proxy_client': ('parent', 'identity_aware_proxy_client', ), + 'create_tunnel_dest_group': ('parent', 'tunnel_dest_group', 'tunnel_dest_group_id', ), + 'delete_identity_aware_proxy_client': ('name', ), + 'delete_tunnel_dest_group': ('name', ), + 'get_brand': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_iap_settings': ('name', ), + 'get_identity_aware_proxy_client': ('name', ), + 'get_tunnel_dest_group': ('name', ), + 'list_brands': ('parent', ), + 'list_identity_aware_proxy_clients': ('parent', 'page_size', 'page_token', ), + 'list_tunnel_dest_groups': ('parent', 'page_size', 'page_token', ), + 'reset_identity_aware_proxy_client_secret': ('name', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_iap_settings': ('iap_settings', 'update_mask', ), + 'update_tunnel_dest_group': ('tunnel_dest_group', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from 
the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=iapCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the iap client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-iap/scripts/readme-gen/readme_gen.py b/packages/google-cloud-iap/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..91b59676bfc7 --- /dev/null +++ b/packages/google-cloud-iap/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-iap/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-iap/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-iap/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-iap/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-iap/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-iap/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-iap/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-iap/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-iap/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. 
From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-iap/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-iap/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-iap/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-iap/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-iap/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-iap/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-iap/setup.cfg b/packages/google-cloud-iap/setup.cfg new file mode 100644 index 000000000000..c3a2b39f6528 --- /dev/null +++ b/packages/google-cloud-iap/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-iap/setup.py b/packages/google-cloud-iap/setup.py new file mode 100644 index 000000000000..25e00709e93a --- /dev/null +++ b/packages/google-cloud-iap/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-iap" + + +description = "Google Cloud Iap API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/iap/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, 
+ zip_safe=False, +) diff --git a/packages/google-cloud-iap/testing/.gitignore b/packages/google-cloud-iap/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-iap/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-iap/testing/constraints-3.10.txt b/packages/google-cloud-iap/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-iap/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-iap/testing/constraints-3.11.txt b/packages/google-cloud-iap/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-iap/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-iap/testing/constraints-3.12.txt b/packages/google-cloud-iap/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-iap/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-iap/testing/constraints-3.7.txt b/packages/google-cloud-iap/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/packages/google-cloud-iap/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-iap/testing/constraints-3.8.txt b/packages/google-cloud-iap/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-iap/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-iap/testing/constraints-3.9.txt b/packages/google-cloud-iap/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-iap/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-iap/tests/__init__.py b/packages/google-cloud-iap/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-iap/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-iap/tests/unit/__init__.py b/packages/google-cloud-iap/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-iap/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-iap/tests/unit/gapic/__init__.py b/packages/google-cloud-iap/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-iap/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/__init__.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py new file mode 100644 index 000000000000..4a1b6caf5302 --- /dev/null +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py @@ -0,0 +1,6545 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.iap_v1.services.identity_aware_proxy_admin_service import ( + IdentityAwareProxyAdminServiceAsyncClient, + IdentityAwareProxyAdminServiceClient, + pagers, + transports, +) +from google.cloud.iap_v1.types import service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
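The first test below pins down how the mTLS endpoint is conventionally derived from a default endpoint: an "mtls" label is inserted after the leftmost host component. A rough sketch of that derivation (inferred from the assertions; the real helper, the client's _get_default_mtls_endpoint, also handles non-googleapis hosts and endpoints that already contain "mtls"):

    def sketch_mtls_endpoint(api_endpoint: str) -> str:
        # "example.googleapis.com" -> "example.mtls.googleapis.com"
        name, dot, rest = api_endpoint.partition(".")
        return f"{name}.mtls.{rest}" if dot else api_endpoint

    assert sketch_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
    assert (
        sketch_mtls_endpoint("example.sandbox.googleapis.com")
        == "example.mtls.sandbox.googleapis.com"
    )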
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert IdentityAwareProxyAdminServiceClient._get_default_mtls_endpoint(None) is None + assert ( + IdentityAwareProxyAdminServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + IdentityAwareProxyAdminServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + IdentityAwareProxyAdminServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + IdentityAwareProxyAdminServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + IdentityAwareProxyAdminServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IdentityAwareProxyAdminServiceClient, "grpc"), + (IdentityAwareProxyAdminServiceAsyncClient, "grpc_asyncio"), + (IdentityAwareProxyAdminServiceClient, "rest"), + ], +) +def test_identity_aware_proxy_admin_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "iap.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iap.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.IdentityAwareProxyAdminServiceGrpcTransport, "grpc"), + (transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.IdentityAwareProxyAdminServiceRestTransport, "rest"), + ], +) +def test_identity_aware_proxy_admin_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IdentityAwareProxyAdminServiceClient, "grpc"), + (IdentityAwareProxyAdminServiceAsyncClient, "grpc_asyncio"), + (IdentityAwareProxyAdminServiceClient, "rest"), + ], +) +def test_identity_aware_proxy_admin_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + 
factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "iap.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iap.googleapis.com" + ) + + +def test_identity_aware_proxy_admin_service_client_get_transport_class(): + transport = IdentityAwareProxyAdminServiceClient.get_transport_class() + available_transports = [ + transports.IdentityAwareProxyAdminServiceGrpcTransport, + transports.IdentityAwareProxyAdminServiceRestTransport, + ] + assert transport in available_transports + + transport = IdentityAwareProxyAdminServiceClient.get_transport_class("grpc") + assert transport == transports.IdentityAwareProxyAdminServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + "grpc", + ), + ( + IdentityAwareProxyAdminServiceAsyncClient, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + IdentityAwareProxyAdminServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyAdminServiceClient), +) +@mock.patch.object( + IdentityAwareProxyAdminServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyAdminServiceAsyncClient), +) +def test_identity_aware_proxy_admin_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + IdentityAwareProxyAdminServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + IdentityAwareProxyAdminServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
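The environment cases exercised next pin down how GOOGLE_API_USE_MTLS_ENDPOINT selects between the plain and mTLS endpoints. A compact sketch of the behavior being asserted (names are illustrative, not the generated implementation):

    from google.auth.exceptions import MutualTLSChannelError

    def sketch_select_endpoint(mode, default_endpoint, mtls_endpoint, have_client_cert):
        if mode == "never":
            return default_endpoint
        if mode == "always":
            return mtls_endpoint
        if mode == "auto":  # the default
            return mtls_endpoint if have_client_cert else default_endpoint
        raise MutualTLSChannelError(f"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT: {mode}")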
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + "grpc", + "true", + ), + ( + IdentityAwareProxyAdminServiceAsyncClient, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + "grpc", + "false", + ), + ( + IdentityAwareProxyAdminServiceAsyncClient, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + 
"false", + ), + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceRestTransport, + "rest", + "true", + ), + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + IdentityAwareProxyAdminServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyAdminServiceClient), +) +@mock.patch.object( + IdentityAwareProxyAdminServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyAdminServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_identity_aware_proxy_admin_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [IdentityAwareProxyAdminServiceClient, IdentityAwareProxyAdminServiceAsyncClient], +) +@mock.patch.object( + IdentityAwareProxyAdminServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyAdminServiceClient), +) +@mock.patch.object( + IdentityAwareProxyAdminServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyAdminServiceAsyncClient), +) +def test_identity_aware_proxy_admin_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
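Outside of the mocks, the same resolution is available directly on the client class; with no environment overrides it would return the pair (DEFAULT_ENDPOINT, None). Illustrative usage:

    from google.cloud.iap_v1.services.identity_aware_proxy_admin_service import (
        IdentityAwareProxyAdminServiceClient,
    )

    endpoint, cert_source = (
        IdentityAwareProxyAdminServiceClient.get_mtls_endpoint_and_cert_source()
    )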
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + "grpc", + ), + ( + IdentityAwareProxyAdminServiceAsyncClient, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceRestTransport, + "rest", + ), + ], +) +def test_identity_aware_proxy_admin_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + IdentityAwareProxyAdminServiceAsyncClient, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceRestTransport, + "rest", + None, + ), + ], +) +def test_identity_aware_proxy_admin_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
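For reference, the option exercised in this test corresponds to ordinary application code along these lines ("credentials.json" is a placeholder path, and the top-level iap_v1 import is assumed to re-export the client as published):

    from google.api_core import client_options
    from google.cloud import iap_v1

    options = client_options.ClientOptions(credentials_file="credentials.json")
    client = iap_v1.IdentityAwareProxyAdminServiceClient(client_options=options)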
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_identity_aware_proxy_admin_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_admin_service.transports.IdentityAwareProxyAdminServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = IdentityAwareProxyAdminServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + IdentityAwareProxyAdminServiceAsyncClient, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_identity_aware_proxy_admin_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
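The helper patched below is public google-auth API; without the mocks, the same loading step looks like this (the path is a placeholder):

    import google.auth

    credentials, project_id = google.auth.load_credentials_from_file("credentials.json")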
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "iap.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="iap.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
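For contrast with the mocked flow, an unmocked call is shaped roughly as follows (the resource path and member are placeholders; consult the IAP documentation for the exact resource format):

    from google.cloud import iap_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    client = iap_v1.IdentityAwareProxyAdminServiceClient()
    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/iap.httpsResourceAccessor",
                members=["user:someone@example.com"],  # placeholder member
            )
        ]
    )
    request = iam_policy_pb2.SetIamPolicyRequest(
        resource="projects/123/iap_web",  # placeholder resource
        policy=policy,
    )
    response = client.set_iam_policy(request=request)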
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
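+ # (The request below is a plain dict whose values come from foreign proto + # modules; the client is expected to coerce it into a SetIamPolicyRequest.)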
+ call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
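+ # TestIamPermissions echoes back the subset of the requested permissions that + # the caller actually holds.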
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
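+ # x-goog-request-params is the implicit routing header: it carries request + # fields that appear in the URI path (here "resource") so the backend can + # route the call.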
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetIapSettingsRequest, + dict, + ], +) +def test_get_iap_settings(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iap_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.IapSettings( + name="name_value", + ) + response = client.get_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetIapSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IapSettings) + assert response.name == "name_value" + + +def test_get_iap_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
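+ # __call__ is patched on the type of the stub method because Python looks up + # special methods on the class, not the instance.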
+ with mock.patch.object(type(client.transport.get_iap_settings), "__call__") as call: + client.get_iap_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetIapSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_iap_settings_async( + transport: str = "grpc_asyncio", request_type=service.GetIapSettingsRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iap_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IapSettings( + name="name_value", + ) + ) + response = await client.get_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetIapSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IapSettings) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_iap_settings_async_from_dict(): + await test_get_iap_settings_async(request_type=dict) + + +def test_get_iap_settings_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetIapSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iap_settings), "__call__") as call: + call.return_value = service.IapSettings() + client.get_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iap_settings_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetIapSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iap_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.IapSettings()) + await client.get_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateIapSettingsRequest, + dict, + ], +) +def test_update_iap_settings(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_iap_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.IapSettings( + name="name_value", + ) + response = client.update_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateIapSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IapSettings) + assert response.name == "name_value" + + +def test_update_iap_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_iap_settings), "__call__" + ) as call: + client.update_iap_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateIapSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_iap_settings_async( + transport: str = "grpc_asyncio", request_type=service.UpdateIapSettingsRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_iap_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IapSettings( + name="name_value", + ) + ) + response = await client.update_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateIapSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IapSettings) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_update_iap_settings_async_from_dict(): + await test_update_iap_settings_async(request_type=dict) + + +def test_update_iap_settings_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
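+ # For Update RPCs the routing field is nested, so the expected header value is + # "iap_settings.name=name_value" rather than a top-level field.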
+ request = service.UpdateIapSettingsRequest() + + request.iap_settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_iap_settings), "__call__" + ) as call: + call.return_value = service.IapSettings() + client.update_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "iap_settings.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_iap_settings_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateIapSettingsRequest() + + request.iap_settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_iap_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.IapSettings()) + await client.update_iap_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "iap_settings.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListTunnelDestGroupsRequest, + dict, + ], +) +def test_list_tunnel_dest_groups(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTunnelDestGroupsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_tunnel_dest_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTunnelDestGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunnelDestGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tunnel_dest_groups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + client.list_tunnel_dest_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTunnelDestGroupsRequest() + + +@pytest.mark.asyncio +async def test_list_tunnel_dest_groups_async( + transport: str = "grpc_asyncio", request_type=service.ListTunnelDestGroupsRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTunnelDestGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tunnel_dest_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTunnelDestGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunnelDestGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tunnel_dest_groups_async_from_dict(): + await test_list_tunnel_dest_groups_async(request_type=dict) + + +def test_list_tunnel_dest_groups_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTunnelDestGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + call.return_value = service.ListTunnelDestGroupsResponse() + client.list_tunnel_dest_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tunnel_dest_groups_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTunnelDestGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTunnelDestGroupsResponse() + ) + await client.list_tunnel_dest_groups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_tunnel_dest_groups_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTunnelDestGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tunnel_dest_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_tunnel_dest_groups_flattened_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tunnel_dest_groups( + service.ListTunnelDestGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tunnel_dest_groups_flattened_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTunnelDestGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tunnel_dest_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_tunnel_dest_groups_flattened_error_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tunnel_dest_groups( + service.ListTunnelDestGroupsRequest(), + parent="parent_value", + ) + + +def test_list_tunnel_dest_groups_pager(transport_name: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + # Set the response to a series of pages.
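+ # With side_effect set to a sequence, the mock returns each response in turn; + # the trailing RuntimeError would surface if the pager fetched past the last page.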
+ call.side_effect = ( + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + next_page_token="abc", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[], + next_page_token="def", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + ], + next_page_token="ghi", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tunnel_dest_groups(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service.TunnelDestGroup) for i in results) + + +def test_list_tunnel_dest_groups_pages(transport_name: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + next_page_token="abc", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[], + next_page_token="def", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + ], + next_page_token="ghi", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tunnel_dest_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tunnel_dest_groups_async_pager(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages.
+ call.side_effect = ( + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + next_page_token="abc", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[], + next_page_token="def", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + ], + next_page_token="ghi", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tunnel_dest_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, service.TunnelDestGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tunnel_dest_groups_async_pages(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tunnel_dest_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + next_page_token="abc", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[], + next_page_token="def", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + ], + next_page_token="ghi", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_tunnel_dest_groups(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateTunnelDestGroupRequest, + dict, + ], +) +def test_create_tunnel_dest_group(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + response = client.create_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateTunnelDestGroupRequest() + + # Establish that the response is the type that we expect.
+ assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +def test_create_tunnel_dest_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tunnel_dest_group), "__call__" + ) as call: + client.create_tunnel_dest_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateTunnelDestGroupRequest() + + +@pytest.mark.asyncio +async def test_create_tunnel_dest_group_async( + transport: str = "grpc_asyncio", request_type=service.CreateTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + ) + response = await client.create_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateTunnelDestGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +@pytest.mark.asyncio +async def test_create_tunnel_dest_group_async_from_dict(): + await test_create_tunnel_dest_group_async(request_type=dict) + + +def test_create_tunnel_dest_group_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateTunnelDestGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tunnel_dest_group), "__call__" + ) as call: + call.return_value = service.TunnelDestGroup() + client.create_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_tunnel_dest_group_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateTunnelDestGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tunnel_dest_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup() + ) + await client.create_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_tunnel_dest_group_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.TunnelDestGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tunnel_dest_group( + parent="parent_value", + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].tunnel_dest_group + mock_val = service.TunnelDestGroup(name="name_value") + assert arg == mock_val + arg = args[0].tunnel_dest_group_id + mock_val = "tunnel_dest_group_id_value" + assert arg == mock_val + + +def test_create_tunnel_dest_group_flattened_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tunnel_dest_group( + service.CreateTunnelDestGroupRequest(), + parent="parent_value", + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_tunnel_dest_group_flattened_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method.
+ response = await client.create_tunnel_dest_group( + parent="parent_value", + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].tunnel_dest_group + mock_val = service.TunnelDestGroup(name="name_value") + assert arg == mock_val + arg = args[0].tunnel_dest_group_id + mock_val = "tunnel_dest_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_tunnel_dest_group_flattened_error_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tunnel_dest_group( + service.CreateTunnelDestGroupRequest(), + parent="parent_value", + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetTunnelDestGroupRequest, + dict, + ], +) +def test_get_tunnel_dest_group(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + response = client.get_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTunnelDestGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +def test_get_tunnel_dest_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_tunnel_dest_group), "__call__" + ) as call: + client.get_tunnel_dest_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTunnelDestGroupRequest() + + +@pytest.mark.asyncio +async def test_get_tunnel_dest_group_async( + transport: str = "grpc_asyncio", request_type=service.GetTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + ) + response = await client.get_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTunnelDestGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +@pytest.mark.asyncio +async def test_get_tunnel_dest_group_async_from_dict(): + await test_get_tunnel_dest_group_async(request_type=dict) + + +def test_get_tunnel_dest_group_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetTunnelDestGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tunnel_dest_group), "__call__" + ) as call: + call.return_value = service.TunnelDestGroup() + client.get_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_tunnel_dest_group_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetTunnelDestGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tunnel_dest_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup() + ) + await client.get_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_tunnel_dest_group_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.TunnelDestGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_tunnel_dest_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_tunnel_dest_group_flattened_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tunnel_dest_group( + service.GetTunnelDestGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_tunnel_dest_group_flattened_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_tunnel_dest_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_tunnel_dest_group_flattened_error_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_tunnel_dest_group( + service.GetTunnelDestGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteTunnelDestGroupRequest, + dict, + ], +) +def test_delete_tunnel_dest_group(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request.
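+ # DeleteTunnelDestGroup maps to google.protobuf.Empty on the wire, which the + # client surfaces to callers as None.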
+ with mock.patch.object( + type(client.transport.delete_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteTunnelDestGroupRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tunnel_dest_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tunnel_dest_group), "__call__" + ) as call: + client.delete_tunnel_dest_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteTunnelDestGroupRequest() + + +@pytest.mark.asyncio +async def test_delete_tunnel_dest_group_async( + transport: str = "grpc_asyncio", request_type=service.DeleteTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteTunnelDestGroupRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tunnel_dest_group_async_from_dict(): + await test_delete_tunnel_dest_group_async(request_type=dict) + + +def test_delete_tunnel_dest_group_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteTunnelDestGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tunnel_dest_group), "__call__" + ) as call: + call.return_value = None + client.delete_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_tunnel_dest_group_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteTunnelDestGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tunnel_dest_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_tunnel_dest_group_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tunnel_dest_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_tunnel_dest_group_flattened_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tunnel_dest_group( + service.DeleteTunnelDestGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_tunnel_dest_group_flattened_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tunnel_dest_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_tunnel_dest_group_flattened_error_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tunnel_dest_group( + service.DeleteTunnelDestGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateTunnelDestGroupRequest, + dict, + ], +) +def test_update_tunnel_dest_group(request_type, transport: str = "grpc"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + response = client.update_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateTunnelDestGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +def test_update_tunnel_dest_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tunnel_dest_group), "__call__" + ) as call: + client.update_tunnel_dest_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateTunnelDestGroupRequest() + + +@pytest.mark.asyncio +async def test_update_tunnel_dest_group_async( + transport: str = "grpc_asyncio", request_type=service.UpdateTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + ) + response = await client.update_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. 
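+    # mock.call objects unpack as (name, args, kwargs); the request message is
+    # always the first positional argument, so the pattern
+    #
+    #     _, args, _ = call.mock_calls[0]
+    #
+    # recovers the proto that was sent to the stub.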
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateTunnelDestGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +@pytest.mark.asyncio +async def test_update_tunnel_dest_group_async_from_dict(): + await test_update_tunnel_dest_group_async(request_type=dict) + + +def test_update_tunnel_dest_group_field_headers(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateTunnelDestGroupRequest() + + request.tunnel_dest_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tunnel_dest_group), "__call__" + ) as call: + call.return_value = service.TunnelDestGroup() + client.update_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tunnel_dest_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_tunnel_dest_group_field_headers_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateTunnelDestGroupRequest() + + request.tunnel_dest_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tunnel_dest_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup() + ) + await client.update_tunnel_dest_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tunnel_dest_group.name=name_value", + ) in kw["metadata"] + + +def test_update_tunnel_dest_group_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.TunnelDestGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tunnel_dest_group( + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
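+    # Each flattened keyword should have been copied verbatim onto the request,
+    # e.g. args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]).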
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tunnel_dest_group + mock_val = service.TunnelDestGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_tunnel_dest_group_flattened_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tunnel_dest_group( + service.UpdateTunnelDestGroupRequest(), + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_tunnel_dest_group_flattened_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tunnel_dest_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.TunnelDestGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.TunnelDestGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tunnel_dest_group( + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tunnel_dest_group + mock_val = service.TunnelDestGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_tunnel_dest_group_flattened_error_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tunnel_dest_group( + service.UpdateTunnelDestGroupRequest(), + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
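+        # The fake Policy below is serialized with json_format.MessageToJson
+        # and installed as the raw HTTP body via Response._content, so the
+        # client exercises its real JSON deserialization path.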
+ return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
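+            # transcode() would normally map the request onto an http_options
+            # rule and return a dict of the form
+            #
+            #     {"uri": ..., "method": ..., "query_params": ..., "body": ...}
+            #
+            # so stubbing it keeps this test independent of the URI template.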
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyAdminServiceRestInterceptor(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_set_iam_policy" + ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_set_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.SetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.SetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyAdminServiceRestInterceptor(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_get_iam_policy" + ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_get_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
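+    # The REST transport maps 4xx responses to the matching google.api_core
+    # exception, so a 400 status surfaces as core_exceptions.BadRequest here.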
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = return_value
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.test_iam_permissions(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_test_iam_permissions_rest_unset_required_fields():
+    transport = transports.IdentityAwareProxyAdminServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.test_iam_permissions._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "resource",
+                "permissions",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_test_iam_permissions_rest_interceptors(null_interceptor):
+    transport = transports.IdentityAwareProxyAdminServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.IdentityAwareProxyAdminServiceRestInterceptor(),
+    )
+    client = IdentityAwareProxyAdminServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.IdentityAwareProxyAdminServiceRestInterceptor,
+        "post_test_iam_permissions",
+    ) as post, mock.patch.object(
+        transports.IdentityAwareProxyAdminServiceRestInterceptor,
+        "pre_test_iam_permissions",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = iam_policy_pb2.TestIamPermissionsRequest()
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            iam_policy_pb2.TestIamPermissionsResponse()
+        )
+
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+
+        client.test_iam_permissions(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_test_iam_permissions_rest_bad_request(
+    transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest
+):
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"resource": "sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.test_iam_permissions(request)
+
+
+def test_test_iam_permissions_rest_error():
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetIapSettingsRequest,
+        dict,
+    ],
+)
+def test_get_iap_settings_rest(request_type):
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.IapSettings(
+            name="name_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = service.IapSettings.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_iap_settings(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, service.IapSettings)
+    assert response.name == "name_value"
+
+
+def test_get_iap_settings_rest_required_fields(
+    request_type=service.GetIapSettingsRequest,
+):
+    transport_class = transports.IdentityAwareProxyAdminServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_iap_settings._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_iap_settings._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = service.IapSettings()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.IapSettings.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iap_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iap_settings_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iap_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iap_settings_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyAdminServiceRestInterceptor(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_get_iap_settings", + ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_get_iap_settings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetIapSettingsRequest.pb(service.GetIapSettingsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.IapSettings.to_json(service.IapSettings()) + + request = service.GetIapSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.IapSettings() + + client.get_iap_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iap_settings_rest_bad_request( + transport: str = "rest", request_type=service.GetIapSettingsRequest +): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iap_settings(request) + + +def test_get_iap_settings_rest_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateIapSettingsRequest, + dict, + ], +) +def test_update_iap_settings_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"iap_settings": {"name": "sample1"}} + request_init["iap_settings"] = { + "name": "sample1", + "access_settings": { + "gcip_settings": { + "tenant_ids": ["tenant_ids_value1", "tenant_ids_value2"], + "login_page_uri": {"value": "value_value"}, + }, + "cors_settings": {"allow_http_options": {"value": True}}, + "oauth_settings": {"login_hint": {}}, + "reauth_settings": { + "method": 1, + "max_age": {"seconds": 751, "nanos": 543}, + "policy_type": 1, + }, + "allowed_domains_settings": { + "enable": True, + "domains": ["domains_value1", "domains_value2"], + }, + }, + "application_settings": { + "csm_settings": {"rctoken_aud": {}}, + "access_denied_page_settings": { + "access_denied_page_uri": {}, + "generate_troubleshooting_uri": {}, + "remediation_token_generation_enabled": {}, + }, + "cookie_domain": {}, + "attribute_propagation_settings": { + "expression": "expression_value", + "output_credentials": [1], + "enable": True, + }, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.IapSettings( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.IapSettings.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_iap_settings(request) + + # Establish that the response is the type that we expect. 
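+    # The values asserted below have round-tripped through IapSettings.pb()
+    # and json_format, so this also covers the response-parsing path.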
+ assert isinstance(response, service.IapSettings) + assert response.name == "name_value" + + +def test_update_iap_settings_rest_required_fields( + request_type=service.UpdateIapSettingsRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_iap_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_iap_settings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.IapSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.IapSettings.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_iap_settings(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_iap_settings_rest_unset_required_fields():
+    transport = transports.IdentityAwareProxyAdminServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_iap_settings._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask",)) & set(("iapSettings",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_iap_settings_rest_interceptors(null_interceptor):
+    transport = transports.IdentityAwareProxyAdminServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.IdentityAwareProxyAdminServiceRestInterceptor(),
+    )
+    client = IdentityAwareProxyAdminServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.IdentityAwareProxyAdminServiceRestInterceptor,
+        "post_update_iap_settings",
+    ) as post, mock.patch.object(
+        transports.IdentityAwareProxyAdminServiceRestInterceptor,
+        "pre_update_iap_settings",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.UpdateIapSettingsRequest.pb(
+            service.UpdateIapSettingsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.IapSettings.to_json(service.IapSettings())
+
+        request = service.UpdateIapSettingsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.IapSettings()
+
+        client.update_iap_settings(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_iap_settings_rest_bad_request(
+    transport: str = "rest", request_type=service.UpdateIapSettingsRequest
+):
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"iap_settings": {"name": "sample1"}}
+    request_init["iap_settings"] = {
+        "name": "sample1",
+        "access_settings": {
+            "gcip_settings": {
+                "tenant_ids": ["tenant_ids_value1", "tenant_ids_value2"],
+                "login_page_uri": {"value": "value_value"},
+            },
+            "cors_settings": {"allow_http_options": {"value": True}},
+            "oauth_settings": {"login_hint": {}},
+            "reauth_settings": {
+                "method": 1,
+                "max_age": {"seconds": 751, "nanos": 543},
+                "policy_type": 1,
+            },
+            "allowed_domains_settings": {
+                "enable": True,
+                "domains": ["domains_value1", "domains_value2"],
+            },
+        },
+        "application_settings": {
+            "csm_settings": {"rctoken_aud": {}},
+            "access_denied_page_settings": {
+                "access_denied_page_uri": {},
+                "generate_troubleshooting_uri": {},
+                "remediation_token_generation_enabled": {},
+            },
+            "cookie_domain": {},
+            "attribute_propagation_settings": {
+                "expression": "expression_value",
+                "output_credentials": [1],
+                "enable": True,
+            },
+        },
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_iap_settings(request)
+
+
+def test_update_iap_settings_rest_error():
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.ListTunnelDestGroupsRequest,
+        dict,
+    ],
+)
+def test_list_tunnel_dest_groups_rest(request_type):
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/iap_tunnel/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListTunnelDestGroupsResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = service.ListTunnelDestGroupsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_tunnel_dest_groups(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListTunnelDestGroupsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_tunnel_dest_groups_rest_required_fields(
+    request_type=service.ListTunnelDestGroupsRequest,
+):
+    transport_class = transports.IdentityAwareProxyAdminServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_tunnel_dest_groups._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = "parent_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_tunnel_dest_groups._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(
+        (
+            "page_size",
+            "page_token",
+        )
+    )
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == "parent_value"
+
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = service.ListTunnelDestGroupsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.ListTunnelDestGroupsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_tunnel_dest_groups(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_tunnel_dest_groups_rest_unset_required_fields():
+    transport = transports.IdentityAwareProxyAdminServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.list_tunnel_dest_groups._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_tunnel_dest_groups_rest_interceptors(null_interceptor):
+    transport = transports.IdentityAwareProxyAdminServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.IdentityAwareProxyAdminServiceRestInterceptor(),
+    )
+    client = IdentityAwareProxyAdminServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.IdentityAwareProxyAdminServiceRestInterceptor,
+        "post_list_tunnel_dest_groups",
+    ) as post, mock.patch.object(
+        transports.IdentityAwareProxyAdminServiceRestInterceptor,
+        "pre_list_tunnel_dest_groups",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.ListTunnelDestGroupsRequest.pb(
+            service.ListTunnelDestGroupsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.ListTunnelDestGroupsResponse.to_json(
+            service.ListTunnelDestGroupsResponse()
+        )
+
+        request = service.ListTunnelDestGroupsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListTunnelDestGroupsResponse()
+
+        client.list_tunnel_dest_groups(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_tunnel_dest_groups_rest_bad_request(
+    transport: str = "rest", request_type=service.ListTunnelDestGroupsRequest
+):
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/iap_tunnel/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_tunnel_dest_groups(request)
+
+
+def test_list_tunnel_dest_groups_rest_flattened():
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListTunnelDestGroupsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/iap_tunnel/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = service.ListTunnelDestGroupsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.list_tunnel_dest_groups(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/iap_tunnel/locations/*}/destGroups"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_tunnel_dest_groups_rest_flattened_error(transport: str = "rest"):
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_tunnel_dest_groups(
+            service.ListTunnelDestGroupsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_tunnel_dest_groups_rest_pager(transport: str = "rest"):
+    client = IdentityAwareProxyAdminServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + next_page_token="abc", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[], + next_page_token="def", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + ], + next_page_token="ghi", + ), + service.ListTunnelDestGroupsResponse( + tunnel_dest_groups=[ + service.TunnelDestGroup(), + service.TunnelDestGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListTunnelDestGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/iap_tunnel/locations/sample2"} + + pager = client.list_tunnel_dest_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service.TunnelDestGroup) for i in results) + + pages = list(client.list_tunnel_dest_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateTunnelDestGroupRequest, + dict, + ], +) +def test_create_tunnel_dest_group_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/iap_tunnel/locations/sample2"} + request_init["tunnel_dest_group"] = { + "name": "name_value", + "cidrs": ["cidrs_value1", "cidrs_value2"], + "fqdns": ["fqdns_value1", "fqdns_value2"], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_tunnel_dest_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +def test_create_tunnel_dest_group_rest_required_fields( + request_type=service.CreateTunnelDestGroupRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["tunnel_dest_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "tunnelDestGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tunnel_dest_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "tunnelDestGroupId" in jsonified_request + assert ( + jsonified_request["tunnelDestGroupId"] == request_init["tunnel_dest_group_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["tunnelDestGroupId"] = "tunnel_dest_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tunnel_dest_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("tunnel_dest_group_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "tunnelDestGroupId" in jsonified_request + assert jsonified_request["tunnelDestGroupId"] == "tunnel_dest_group_id_value" + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
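+            # This is a create call, so the stub below also attaches the
+            # request message as the HTTP body (transcode_result["body"]),
+            # mirroring a POST whose http rule maps the resource to the body.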
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_tunnel_dest_group(request) + + expected_params = [ + ( + "tunnelDestGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_tunnel_dest_group_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_tunnel_dest_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("tunnelDestGroupId",)) + & set( + ( + "parent", + "tunnelDestGroup", + "tunnelDestGroupId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_tunnel_dest_group_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyAdminServiceRestInterceptor(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_create_tunnel_dest_group", + ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "pre_create_tunnel_dest_group", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateTunnelDestGroupRequest.pb( + service.CreateTunnelDestGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.TunnelDestGroup.to_json( + service.TunnelDestGroup() + ) + + request = service.CreateTunnelDestGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.TunnelDestGroup() + + client.create_tunnel_dest_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_tunnel_dest_group_rest_bad_request( + transport: str = "rest", request_type=service.CreateTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/iap_tunnel/locations/sample2"} + request_init["tunnel_dest_group"] = { + "name": "name_value", + "cidrs": ["cidrs_value1", "cidrs_value2"], + "fqdns": ["fqdns_value1", "fqdns_value2"], + } + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_tunnel_dest_group(request) + + +def test_create_tunnel_dest_group_rest_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/iap_tunnel/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_tunnel_dest_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/iap_tunnel/locations/*}/destGroups" + % client.transport._host, + args[1], + ) + + +def test_create_tunnel_dest_group_rest_flattened_error(transport: str = "rest"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tunnel_dest_group( + service.CreateTunnelDestGroupRequest(), + parent="parent_value", + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + tunnel_dest_group_id="tunnel_dest_group_id_value", + ) + + +def test_create_tunnel_dest_group_rest_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetTunnelDestGroupRequest, + dict, + ], +) +def test_get_tunnel_dest_group_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_tunnel_dest_group(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +def test_get_tunnel_dest_group_rest_required_fields( + request_type=service.GetTunnelDestGroupRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tunnel_dest_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tunnel_dest_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
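+            # GET carries no request body, so the stub below sets only the
+            # uri, method, and query_params.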
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_tunnel_dest_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_tunnel_dest_group_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_tunnel_dest_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_tunnel_dest_group_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyAdminServiceRestInterceptor(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_get_tunnel_dest_group", + ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "pre_get_tunnel_dest_group", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetTunnelDestGroupRequest.pb( + service.GetTunnelDestGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.TunnelDestGroup.to_json( + service.TunnelDestGroup() + ) + + request = service.GetTunnelDestGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.TunnelDestGroup() + + client.get_tunnel_dest_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_tunnel_dest_group_rest_bad_request( + transport: str = "rest", request_type=service.GetTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
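+    # google-api-core maps an HTTP 400 response to
+    # core_exceptions.BadRequest, which pytest.raises expects below.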
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_tunnel_dest_group(request) + + +def test_get_tunnel_dest_group_rest_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_tunnel_dest_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/iap_tunnel/locations/*/destGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_tunnel_dest_group_rest_flattened_error(transport: str = "rest"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tunnel_dest_group( + service.GetTunnelDestGroupRequest(), + name="name_value", + ) + + +def test_get_tunnel_dest_group_rest_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteTunnelDestGroupRequest, + dict, + ], +) +def test_delete_tunnel_dest_group_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_tunnel_dest_group(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_tunnel_dest_group_rest_required_fields( + request_type=service.DeleteTunnelDestGroupRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tunnel_dest_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tunnel_dest_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
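+            # DELETE returns an empty message over REST, so the faked response
+            # body further down is just an empty string.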
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_tunnel_dest_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_tunnel_dest_group_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_tunnel_dest_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_tunnel_dest_group_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyAdminServiceRestInterceptor(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "pre_delete_tunnel_dest_group", + ) as pre: + pre.assert_not_called() + pb_message = service.DeleteTunnelDestGroupRequest.pb( + service.DeleteTunnelDestGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = service.DeleteTunnelDestGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_tunnel_dest_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_tunnel_dest_group_rest_bad_request( + transport: str = "rest", request_type=service.DeleteTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_tunnel_dest_group(request) + + +def test_delete_tunnel_dest_group_rest_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_tunnel_dest_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/iap_tunnel/locations/*/destGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_tunnel_dest_group_rest_flattened_error(transport: str = "rest"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tunnel_dest_group( + service.DeleteTunnelDestGroupRequest(), + name="name_value", + ) + + +def test_delete_tunnel_dest_group_rest_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateTunnelDestGroupRequest, + dict, + ], +) +def test_update_tunnel_dest_group_rest(request_type): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "tunnel_dest_group": { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + } + request_init["tunnel_dest_group"] = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3", + "cidrs": ["cidrs_value1", "cidrs_value2"], + "fqdns": ["fqdns_value1", "fqdns_value2"], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup( + name="name_value", + cidrs=["cidrs_value"], + fqdns=["fqdns_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_tunnel_dest_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.TunnelDestGroup) + assert response.name == "name_value" + assert response.cidrs == ["cidrs_value"] + assert response.fqdns == ["fqdns_value"] + + +def test_update_tunnel_dest_group_rest_required_fields( + request_type=service.UpdateTunnelDestGroupRequest, +): + transport_class = transports.IdentityAwareProxyAdminServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tunnel_dest_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tunnel_dest_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
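+            # update maps to PATCH and, like create, sends the message as the
+            # HTTP body via transcode_result["body"].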
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_tunnel_dest_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_tunnel_dest_group_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_tunnel_dest_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("tunnelDestGroup",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_tunnel_dest_group_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyAdminServiceRestInterceptor(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_update_tunnel_dest_group", + ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "pre_update_tunnel_dest_group", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateTunnelDestGroupRequest.pb( + service.UpdateTunnelDestGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.TunnelDestGroup.to_json( + service.TunnelDestGroup() + ) + + request = service.UpdateTunnelDestGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.TunnelDestGroup() + + client.update_tunnel_dest_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_tunnel_dest_group_rest_bad_request( + transport: str = "rest", request_type=service.UpdateTunnelDestGroupRequest +): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "tunnel_dest_group": { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + } + request_init["tunnel_dest_group"] = { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3", + "cidrs": ["cidrs_value1", "cidrs_value2"], + "fqdns": ["fqdns_value1", "fqdns_value2"], + } + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_tunnel_dest_group(request) + + +def test_update_tunnel_dest_group_rest_flattened(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.TunnelDestGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "tunnel_dest_group": { + "name": "projects/sample1/iap_tunnel/locations/sample2/destGroups/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.TunnelDestGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_tunnel_dest_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{tunnel_dest_group.name=projects/*/iap_tunnel/locations/*/destGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_tunnel_dest_group_rest_flattened_error(transport: str = "rest"): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tunnel_dest_group( + service.UpdateTunnelDestGroupRequest(), + tunnel_dest_group=service.TunnelDestGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_tunnel_dest_group_rest_error(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.IdentityAwareProxyAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.IdentityAwareProxyAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IdentityAwareProxyAdminServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.IdentityAwareProxyAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IdentityAwareProxyAdminServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IdentityAwareProxyAdminServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.IdentityAwareProxyAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IdentityAwareProxyAdminServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.IdentityAwareProxyAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = IdentityAwareProxyAdminServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.IdentityAwareProxyAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyAdminServiceGrpcTransport, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + transports.IdentityAwareProxyAdminServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = IdentityAwareProxyAdminServiceClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + ) + + +def test_identity_aware_proxy_admin_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.IdentityAwareProxyAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_identity_aware_proxy_admin_service_base_transport(): + # Instantiate the base transport. 
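+    # __init__ is patched to a no-op so that the abstract transport can be
+    # constructed without any real credential plumbing.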
+ with mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_admin_service.transports.IdentityAwareProxyAdminServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.IdentityAwareProxyAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_iap_settings", + "update_iap_settings", + "list_tunnel_dest_groups", + "create_tunnel_dest_group", + "get_tunnel_dest_group", + "delete_tunnel_dest_group", + "update_tunnel_dest_group", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_identity_aware_proxy_admin_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_admin_service.transports.IdentityAwareProxyAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IdentityAwareProxyAdminServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_identity_aware_proxy_admin_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_admin_service.transports.IdentityAwareProxyAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IdentityAwareProxyAdminServiceTransport() + adc.assert_called_once() + + +def test_identity_aware_proxy_admin_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + IdentityAwareProxyAdminServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyAdminServiceGrpcTransport, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + ], +) +def test_identity_aware_proxy_admin_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
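+    # (ADC: Application Default Credentials, resolved via google.auth.default().)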
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyAdminServiceGrpcTransport, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + transports.IdentityAwareProxyAdminServiceRestTransport, + ], +) +def test_identity_aware_proxy_admin_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.IdentityAwareProxyAdminServiceGrpcTransport, grpc_helpers), + ( + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_identity_aware_proxy_admin_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "iap.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="iap.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyAdminServiceGrpcTransport, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + ], +) +def test_identity_aware_proxy_admin_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_identity_aware_proxy_admin_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.IdentityAwareProxyAdminServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_identity_aware_proxy_admin_service_host_no_port(transport_name): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="iap.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "iap.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iap.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_identity_aware_proxy_admin_service_host_with_port(transport_name): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="iap.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "iap.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iap.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_identity_aware_proxy_admin_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = IdentityAwareProxyAdminServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = IdentityAwareProxyAdminServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.get_iap_settings._session + session2 = client2.transport.get_iap_settings._session + assert session1 != session2 + session1 = client1.transport.update_iap_settings._session + session2 = client2.transport.update_iap_settings._session + assert session1 != session2 + session1 = client1.transport.list_tunnel_dest_groups._session + session2 = client2.transport.list_tunnel_dest_groups._session + assert session1 != session2 + session1 = client1.transport.create_tunnel_dest_group._session + session2 = client2.transport.create_tunnel_dest_group._session + assert session1 != session2 + 
session1 = client1.transport.get_tunnel_dest_group._session
+    session2 = client2.transport.get_tunnel_dest_group._session
+    assert session1 != session2
+    session1 = client1.transport.delete_tunnel_dest_group._session
+    session2 = client2.transport.delete_tunnel_dest_group._session
+    assert session1 != session2
+    session1 = client1.transport.update_tunnel_dest_group._session
+    session2 = client2.transport.update_tunnel_dest_group._session
+    assert session1 != session2
+
+
+def test_identity_aware_proxy_admin_service_grpc_transport_channel():
+    channel = grpc.secure_channel(
+        "http://localhost/", grpc.local_channel_credentials()
+    )
+
+    # Check that channel is used if provided.
+    transport = transports.IdentityAwareProxyAdminServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_identity_aware_proxy_admin_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel(
+        "http://localhost/", grpc.local_channel_credentials()
+    )
+
+    # Check that channel is used if provided.
+    transport = transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.IdentityAwareProxyAdminServiceGrpcTransport,
+        transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_identity_aware_proxy_admin_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyAdminServiceGrpcTransport, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + ], +) +def test_identity_aware_proxy_admin_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_tunnel_dest_group_path(): + project = "squid" + location = "clam" + dest_group = "whelk" + expected = "projects/{project}/iap_tunnel/locations/{location}/destGroups/{dest_group}".format( + project=project, + location=location, + dest_group=dest_group, + ) + actual = IdentityAwareProxyAdminServiceClient.tunnel_dest_group_path( + project, location, dest_group + ) + assert expected == actual + + +def test_parse_tunnel_dest_group_path(): + expected = { + "project": "octopus", + "location": "oyster", + "dest_group": "nudibranch", + } + path = IdentityAwareProxyAdminServiceClient.tunnel_dest_group_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyAdminServiceClient.parse_tunnel_dest_group_path(path) + assert expected == actual + + +def test_tunnel_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/iap_tunnel/locations/{location}".format( + project=project, + location=location, + ) + actual = IdentityAwareProxyAdminServiceClient.tunnel_location_path( + project, location + ) + assert expected == actual + + +def test_parse_tunnel_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = IdentityAwareProxyAdminServiceClient.tunnel_location_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyAdminServiceClient.parse_tunnel_location_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = IdentityAwareProxyAdminServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = IdentityAwareProxyAdminServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = IdentityAwareProxyAdminServiceClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = IdentityAwareProxyAdminServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = IdentityAwareProxyAdminServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyAdminServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = IdentityAwareProxyAdminServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = IdentityAwareProxyAdminServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyAdminServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = IdentityAwareProxyAdminServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = IdentityAwareProxyAdminServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyAdminServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = IdentityAwareProxyAdminServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = IdentityAwareProxyAdminServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = IdentityAwareProxyAdminServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.IdentityAwareProxyAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.IdentityAwareProxyAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = IdentityAwareProxyAdminServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = IdentityAwareProxyAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = IdentityAwareProxyAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
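+ # The transport must stay open for the duration of the with-block; close()
+ # is expected only after the client context exits.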
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + IdentityAwareProxyAdminServiceClient, + transports.IdentityAwareProxyAdminServiceGrpcTransport, + ), + ( + IdentityAwareProxyAdminServiceAsyncClient, + transports.IdentityAwareProxyAdminServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py new file mode 100644 index 000000000000..b246966534b8 --- /dev/null +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py @@ -0,0 +1,4874 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service import ( + IdentityAwareProxyOAuthServiceAsyncClient, + IdentityAwareProxyOAuthServiceClient, + pagers, + transports, +) +from google.cloud.iap_v1.types import service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert IdentityAwareProxyOAuthServiceClient._get_default_mtls_endpoint(None) is None + assert ( + IdentityAwareProxyOAuthServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + IdentityAwareProxyOAuthServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + IdentityAwareProxyOAuthServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + IdentityAwareProxyOAuthServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + IdentityAwareProxyOAuthServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IdentityAwareProxyOAuthServiceClient, "grpc"), + (IdentityAwareProxyOAuthServiceAsyncClient, "grpc_asyncio"), + (IdentityAwareProxyOAuthServiceClient, "rest"), + ], +) +def test_identity_aware_proxy_o_auth_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "iap.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iap.googleapis.com" + ) + + 
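+# For orientation, a minimal usage sketch of the factory exercised above; the
+# key-file path and project ID are hypothetical placeholders, and list_brands
+# itself is covered later in this module:
+#
+#     import json
+#     from google.cloud import iap_v1
+#
+#     with open("service-account.json") as f:
+#         info = json.load(f)
+#     client = iap_v1.IdentityAwareProxyOAuthServiceClient.from_service_account_info(info)
+#     brands = client.list_brands(request={"parent": "projects/my-project"})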
+@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.IdentityAwareProxyOAuthServiceGrpcTransport, "grpc"), + (transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.IdentityAwareProxyOAuthServiceRestTransport, "rest"), + ], +) +def test_identity_aware_proxy_o_auth_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IdentityAwareProxyOAuthServiceClient, "grpc"), + (IdentityAwareProxyOAuthServiceAsyncClient, "grpc_asyncio"), + (IdentityAwareProxyOAuthServiceClient, "rest"), + ], +) +def test_identity_aware_proxy_o_auth_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "iap.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iap.googleapis.com" + ) + + +def test_identity_aware_proxy_o_auth_service_client_get_transport_class(): + transport = IdentityAwareProxyOAuthServiceClient.get_transport_class() + available_transports = [ + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + transports.IdentityAwareProxyOAuthServiceRestTransport, + ] + assert transport in available_transports + + transport = IdentityAwareProxyOAuthServiceClient.get_transport_class("grpc") + assert transport == transports.IdentityAwareProxyOAuthServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + "grpc", + ), + ( + IdentityAwareProxyOAuthServiceAsyncClient, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + IdentityAwareProxyOAuthServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyOAuthServiceClient), +) +@mock.patch.object( + IdentityAwareProxyOAuthServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyOAuthServiceAsyncClient), +) +def test_identity_aware_proxy_o_auth_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
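+ # A ready-made transport instance short-circuits transport resolution, so
+ # get_transport_class must not be consulted.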
+ with mock.patch.object( + IdentityAwareProxyOAuthServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + IdentityAwareProxyOAuthServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
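+ # Only "true" and "false" are accepted values; anything else should raise
+ # ValueError at client construction time.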
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_audience is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + "grpc", + "true", + ), + ( + IdentityAwareProxyOAuthServiceAsyncClient, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + "grpc", + "false", + ), + ( + IdentityAwareProxyOAuthServiceAsyncClient, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceRestTransport, + "rest", + "true", + ), + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + IdentityAwareProxyOAuthServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyOAuthServiceClient), +) +@mock.patch.object( + IdentityAwareProxyOAuthServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyOAuthServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_identity_aware_proxy_o_auth_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
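+ # Under GOOGLE_API_USE_MTLS_ENDPOINT=auto (patched for this test), a usable
+ # client certificate flips the host to DEFAULT_MTLS_ENDPOINT; otherwise the
+ # plain DEFAULT_ENDPOINT is kept.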
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [IdentityAwareProxyOAuthServiceClient, IdentityAwareProxyOAuthServiceAsyncClient], +) +@mock.patch.object( + IdentityAwareProxyOAuthServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyOAuthServiceClient), +) +@mock.patch.object( + IdentityAwareProxyOAuthServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IdentityAwareProxyOAuthServiceAsyncClient), +) +def test_identity_aware_proxy_o_auth_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
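+ # An explicitly configured api_endpoint always wins; the cert source is
+ # surfaced only while client certificates are enabled.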
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + "grpc", + ), + ( + IdentityAwareProxyOAuthServiceAsyncClient, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceRestTransport, + "rest", + ), + ], +) +def test_identity_aware_proxy_o_auth_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
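+ # User-supplied OAuth scopes must be forwarded to the transport verbatim.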
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + IdentityAwareProxyOAuthServiceAsyncClient, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceRestTransport, + "rest", + None, + ), + ], +) +def test_identity_aware_proxy_o_auth_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_identity_aware_proxy_o_auth_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.transports.IdentityAwareProxyOAuthServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = IdentityAwareProxyOAuthServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + IdentityAwareProxyOAuthServiceAsyncClient, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_identity_aware_proxy_o_auth_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
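+ # The file path is forwarded untouched; the second half of this test checks
+ # that the credentials loaded from that file are what create_channel receives.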
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "iap.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="iap.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListBrandsRequest, + dict, + ], +) +def test_list_brands(request_type, transport: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_brands), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListBrandsResponse() + response = client.list_brands(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBrandsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ListBrandsResponse) + + +def test_list_brands_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
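+ # A call with no arguments should still send a default ListBrandsRequest.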
+ with mock.patch.object(type(client.transport.list_brands), "__call__") as call: + client.list_brands() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBrandsRequest() + + +@pytest.mark.asyncio +async def test_list_brands_async( + transport: str = "grpc_asyncio", request_type=service.ListBrandsRequest +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_brands), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBrandsResponse() + ) + response = await client.list_brands(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBrandsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ListBrandsResponse) + + +@pytest.mark.asyncio +async def test_list_brands_async_from_dict(): + await test_list_brands_async(request_type=dict) + + +def test_list_brands_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListBrandsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_brands), "__call__") as call: + call.return_value = service.ListBrandsResponse() + client.list_brands(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_brands_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListBrandsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_brands), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBrandsResponse() + ) + await client.list_brands(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
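+ # Routing information travels in the x-goog-request-params metadata entry.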
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateBrandRequest, + dict, + ], +) +def test_create_brand(request_type, transport: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_brand), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Brand( + name="name_value", + support_email="support_email_value", + application_title="application_title_value", + org_internal_only=True, + ) + response = client.create_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBrandRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Brand) + assert response.name == "name_value" + assert response.support_email == "support_email_value" + assert response.application_title == "application_title_value" + assert response.org_internal_only is True + + +def test_create_brand_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_brand), "__call__") as call: + client.create_brand() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBrandRequest() + + +@pytest.mark.asyncio +async def test_create_brand_async( + transport: str = "grpc_asyncio", request_type=service.CreateBrandRequest +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_brand), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.Brand( + name="name_value", + support_email="support_email_value", + application_title="application_title_value", + org_internal_only=True, + ) + ) + response = await client.create_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateBrandRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.Brand) + assert response.name == "name_value" + assert response.support_email == "support_email_value" + assert response.application_title == "application_title_value" + assert response.org_internal_only is True + + +@pytest.mark.asyncio +async def test_create_brand_async_from_dict(): + await test_create_brand_async(request_type=dict) + + +def test_create_brand_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateBrandRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_brand), "__call__") as call: + call.return_value = service.Brand() + client.create_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_brand_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateBrandRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_brand), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Brand()) + await client.create_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBrandRequest, + dict, + ], +) +def test_get_brand(request_type, transport: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_brand), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Brand( + name="name_value", + support_email="support_email_value", + application_title="application_title_value", + org_internal_only=True, + ) + response = client.get_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBrandRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.Brand) + assert response.name == "name_value" + assert response.support_email == "support_email_value" + assert response.application_title == "application_title_value" + assert response.org_internal_only is True + + +def test_get_brand_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_brand), "__call__") as call: + client.get_brand() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBrandRequest() + + +@pytest.mark.asyncio +async def test_get_brand_async( + transport: str = "grpc_asyncio", request_type=service.GetBrandRequest +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_brand), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.Brand( + name="name_value", + support_email="support_email_value", + application_title="application_title_value", + org_internal_only=True, + ) + ) + response = await client.get_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBrandRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Brand) + assert response.name == "name_value" + assert response.support_email == "support_email_value" + assert response.application_title == "application_title_value" + assert response.org_internal_only is True + + +@pytest.mark.asyncio +async def test_get_brand_async_from_dict(): + await test_get_brand_async(request_type=dict) + + +def test_get_brand_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetBrandRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_brand), "__call__") as call: + call.return_value = service.Brand() + client.get_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_brand_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.GetBrandRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_brand), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Brand()) + await client.get_brand(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateIdentityAwareProxyClientRequest, + dict, + ], +) +def test_create_identity_aware_proxy_client(request_type, transport: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_identity_aware_proxy_client), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + response = client.create_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateIdentityAwareProxyClientRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +def test_create_identity_aware_proxy_client_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_identity_aware_proxy_client), "__call__" + ) as call: + client.create_identity_aware_proxy_client() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateIdentityAwareProxyClientRequest() + + +@pytest.mark.asyncio +async def test_create_identity_aware_proxy_client_async( + transport: str = "grpc_asyncio", + request_type=service.CreateIdentityAwareProxyClientRequest, +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_identity_aware_proxy_client), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
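+ # Async stubs must return an awaitable; FakeUnaryUnaryCall wraps the response
+ # so the client can await it.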
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + ) + response = await client.create_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateIdentityAwareProxyClientRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_create_identity_aware_proxy_client_async_from_dict(): + await test_create_identity_aware_proxy_client_async(request_type=dict) + + +def test_create_identity_aware_proxy_client_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateIdentityAwareProxyClientRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_identity_aware_proxy_client), "__call__" + ) as call: + call.return_value = service.IdentityAwareProxyClient() + client.create_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_identity_aware_proxy_client_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateIdentityAwareProxyClientRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_identity_aware_proxy_client), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IdentityAwareProxyClient() + ) + await client.create_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListIdentityAwareProxyClientsRequest, + dict, + ], +) +def test_list_identity_aware_proxy_clients(request_type, transport: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
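+ # request_type is either the proto message class or a plain dict (see the
+ # parametrization above); both must be accepted and produce an empty request.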
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListIdentityAwareProxyClientsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_identity_aware_proxy_clients(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListIdentityAwareProxyClientsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIdentityAwareProxyClientsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_identity_aware_proxy_clients_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), "__call__" + ) as call: + client.list_identity_aware_proxy_clients() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListIdentityAwareProxyClientsRequest() + + +@pytest.mark.asyncio +async def test_list_identity_aware_proxy_clients_async( + transport: str = "grpc_asyncio", + request_type=service.ListIdentityAwareProxyClientsRequest, +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListIdentityAwareProxyClientsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_identity_aware_proxy_clients(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListIdentityAwareProxyClientsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIdentityAwareProxyClientsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_identity_aware_proxy_clients_async_from_dict(): + await test_list_identity_aware_proxy_clients_async(request_type=dict) + + +def test_list_identity_aware_proxy_clients_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListIdentityAwareProxyClientsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), "__call__" + ) as call: + call.return_value = service.ListIdentityAwareProxyClientsResponse() + client.list_identity_aware_proxy_clients(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_identity_aware_proxy_clients_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListIdentityAwareProxyClientsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListIdentityAwareProxyClientsResponse() + ) + await client.list_identity_aware_proxy_clients(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_identity_aware_proxy_clients_pager(transport_name: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + next_page_token="abc", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[], + next_page_token="def", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + ], + next_page_token="ghi", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_identity_aware_proxy_clients(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service.IdentityAwareProxyClient) for i in results) + + +def test_list_identity_aware_proxy_clients_pages(transport_name: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + next_page_token="abc", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[], + next_page_token="def", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + ], + next_page_token="ghi", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + ), + RuntimeError, + ) + pages = list(client.list_identity_aware_proxy_clients(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_identity_aware_proxy_clients_async_pager(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + next_page_token="abc", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[], + next_page_token="def", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + ], + next_page_token="ghi", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_identity_aware_proxy_clients( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, service.IdentityAwareProxyClient) for i in responses) + + +@pytest.mark.asyncio +async def test_list_identity_aware_proxy_clients_async_pages(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_identity_aware_proxy_clients), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
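+        # (Editorial note, hedged) side_effect hands back one element per
+        # successive RPC; the trailing RuntimeError would surface if the pager
+        # requested more pages than the fixture provides.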
+ call.side_effect = ( + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + next_page_token="abc", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[], + next_page_token="def", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + ], + next_page_token="ghi", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_identity_aware_proxy_clients(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetIdentityAwareProxyClientRequest, + dict, + ], +) +def test_get_identity_aware_proxy_client(request_type, transport: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_identity_aware_proxy_client), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + response = client.get_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetIdentityAwareProxyClientRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +def test_get_identity_aware_proxy_client_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_identity_aware_proxy_client), "__call__" + ) as call: + client.get_identity_aware_proxy_client() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetIdentityAwareProxyClientRequest() + + +@pytest.mark.asyncio +async def test_get_identity_aware_proxy_client_async( + transport: str = "grpc_asyncio", + request_type=service.GetIdentityAwareProxyClientRequest, +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
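+    # (Editorial note, hedged) request_type defaults to the proto message class;
+    # the *_from_dict variant below re-runs this coroutine with a plain dict,
+    # which the client accepts interchangeably.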
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_identity_aware_proxy_client), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + ) + response = await client.get_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetIdentityAwareProxyClientRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_identity_aware_proxy_client_async_from_dict(): + await test_get_identity_aware_proxy_client_async(request_type=dict) + + +def test_get_identity_aware_proxy_client_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetIdentityAwareProxyClientRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_identity_aware_proxy_client), "__call__" + ) as call: + call.return_value = service.IdentityAwareProxyClient() + client.get_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_identity_aware_proxy_client_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetIdentityAwareProxyClientRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_identity_aware_proxy_client), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IdentityAwareProxyClient() + ) + await client.get_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
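+    # (Editorial note, hedged) Each mock_calls entry unpacks as a
+    # (name, args, kwargs) triple; the routing header travels in
+    # kwargs["metadata"] as (key, value) pairs.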
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.ResetIdentityAwareProxyClientSecretRequest, + dict, + ], +) +def test_reset_identity_aware_proxy_client_secret( + request_type, transport: str = "grpc" +): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_identity_aware_proxy_client_secret), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + response = client.reset_identity_aware_proxy_client_secret(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ResetIdentityAwareProxyClientSecretRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +def test_reset_identity_aware_proxy_client_secret_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_identity_aware_proxy_client_secret), "__call__" + ) as call: + client.reset_identity_aware_proxy_client_secret() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ResetIdentityAwareProxyClientSecretRequest() + + +@pytest.mark.asyncio +async def test_reset_identity_aware_proxy_client_secret_async( + transport: str = "grpc_asyncio", + request_type=service.ResetIdentityAwareProxyClientSecretRequest, +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_identity_aware_proxy_client_secret), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + ) + response = await client.reset_identity_aware_proxy_client_secret(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ResetIdentityAwareProxyClientSecretRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_reset_identity_aware_proxy_client_secret_async_from_dict(): + await test_reset_identity_aware_proxy_client_secret_async(request_type=dict) + + +def test_reset_identity_aware_proxy_client_secret_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ResetIdentityAwareProxyClientSecretRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_identity_aware_proxy_client_secret), "__call__" + ) as call: + call.return_value = service.IdentityAwareProxyClient() + client.reset_identity_aware_proxy_client_secret(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reset_identity_aware_proxy_client_secret_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ResetIdentityAwareProxyClientSecretRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reset_identity_aware_proxy_client_secret), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IdentityAwareProxyClient() + ) + await client.reset_identity_aware_proxy_client_secret(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteIdentityAwareProxyClientRequest, + dict, + ], +) +def test_delete_identity_aware_proxy_client(request_type, transport: str = "grpc"): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_identity_aware_proxy_client), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteIdentityAwareProxyClientRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_identity_aware_proxy_client_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_identity_aware_proxy_client), "__call__" + ) as call: + client.delete_identity_aware_proxy_client() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteIdentityAwareProxyClientRequest() + + +@pytest.mark.asyncio +async def test_delete_identity_aware_proxy_client_async( + transport: str = "grpc_asyncio", + request_type=service.DeleteIdentityAwareProxyClientRequest, +): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_identity_aware_proxy_client), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteIdentityAwareProxyClientRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_identity_aware_proxy_client_async_from_dict(): + await test_delete_identity_aware_proxy_client_async(request_type=dict) + + +def test_delete_identity_aware_proxy_client_field_headers(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteIdentityAwareProxyClientRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_identity_aware_proxy_client), "__call__" + ) as call: + call.return_value = None + client.delete_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_identity_aware_proxy_client_field_headers_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.DeleteIdentityAwareProxyClientRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_identity_aware_proxy_client), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_identity_aware_proxy_client(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListBrandsRequest, + dict, + ], +) +def test_list_brands_rest(request_type): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListBrandsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListBrandsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_brands(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ListBrandsResponse) + + +def test_list_brands_rest_required_fields(request_type=service.ListBrandsRequest): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_brands._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_brands._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListBrandsResponse() + # Mock the http request call within the method and fake a response. 
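+    # (Editorial note, hedged) Session is requests.Session; patching its
+    # request() keeps the REST transport from performing any real HTTP I/O.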
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListBrandsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_brands(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_brands_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_brands._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_brands_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyOAuthServiceRestInterceptor(), + ) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_list_brands" + ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_list_brands" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListBrandsRequest.pb(service.ListBrandsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListBrandsResponse.to_json( + service.ListBrandsResponse() + ) + + request = service.ListBrandsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListBrandsResponse() + + client.list_brands( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_brands_rest_bad_request( + transport: str = "rest", request_type=service.ListBrandsRequest +): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
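+    # (Editorial note, hedged) The transport maps an HTTP 400 to
+    # google.api_core.exceptions.BadRequest, which pytest.raises captures.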
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_brands(request) + + +def test_list_brands_rest_error(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateBrandRequest, + dict, + ], +) +def test_create_brand_rest(request_type): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["brand"] = { + "name": "name_value", + "support_email": "support_email_value", + "application_title": "application_title_value", + "org_internal_only": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.Brand( + name="name_value", + support_email="support_email_value", + application_title="application_title_value", + org_internal_only=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.Brand.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_brand(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Brand) + assert response.name == "name_value" + assert response.support_email == "support_email_value" + assert response.application_title == "application_title_value" + assert response.org_internal_only is True + + +def test_create_brand_rest_required_fields(request_type=service.CreateBrandRequest): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_brand._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_brand._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = service.Brand()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.Brand.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_brand(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_brand_rest_unset_required_fields():
+    transport = transports.IdentityAwareProxyOAuthServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_brand._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "brand",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_brand_rest_interceptors(null_interceptor):
+    transport = transports.IdentityAwareProxyOAuthServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.IdentityAwareProxyOAuthServiceRestInterceptor(),
+    )
+    client = IdentityAwareProxyOAuthServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_create_brand"
+    ) as post, mock.patch.object(
+        transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_create_brand"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.CreateBrandRequest.pb(service.CreateBrandRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = service.Brand.to_json(service.Brand())
+
+        request = service.CreateBrandRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.Brand()
+
+        client.create_brand(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_brand_rest_bad_request(
+    transport: str = "rest", request_type=service.CreateBrandRequest
+):
+    client = IdentityAwareProxyOAuthServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request_init["brand"] = {
+        "name": "name_value",
+        "support_email": "support_email_value",
+        "application_title": "application_title_value",
+        "org_internal_only": True,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_brand(request)
+
+
+def test_create_brand_rest_error():
+    client = IdentityAwareProxyOAuthServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        service.GetBrandRequest,
+        dict,
+    ],
+)
+def test_get_brand_rest(request_type):
+    client = IdentityAwareProxyOAuthServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/brands/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.Brand(
+            name="name_value",
+            support_email="support_email_value",
+            application_title="application_title_value",
+            org_internal_only=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = service.Brand.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_brand(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, service.Brand) + assert response.name == "name_value" + assert response.support_email == "support_email_value" + assert response.application_title == "application_title_value" + assert response.org_internal_only is True + + +def test_get_brand_rest_required_fields(request_type=service.GetBrandRequest): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_brand._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_brand._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.Brand() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
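+            # (Editorial note, hedged) Stubbing transcode() with a fixed URI and
+            # verb sidesteps the real http_options matching, which would reject
+            # these synthetic field values.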
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.Brand.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_brand(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_brand_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_brand._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_brand_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyOAuthServiceRestInterceptor(), + ) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_get_brand" + ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_get_brand" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetBrandRequest.pb(service.GetBrandRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.Brand.to_json(service.Brand()) + + request = service.GetBrandRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.Brand() + + client.get_brand( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_brand_rest_bad_request( + transport: str = "rest", request_type=service.GetBrandRequest +): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/brands/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_brand(request) + + +def test_get_brand_rest_error(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateIdentityAwareProxyClientRequest, + dict, + ], +) +def test_create_identity_aware_proxy_client_rest(request_type): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/brands/sample2"} + request_init["identity_aware_proxy_client"] = { + "name": "name_value", + "secret": "secret_value", + "display_name": "display_name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.IdentityAwareProxyClient.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_identity_aware_proxy_client(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +def test_create_identity_aware_proxy_client_rest_required_fields( + request_type=service.CreateIdentityAwareProxyClientRequest, +): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_identity_aware_proxy_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_identity_aware_proxy_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.IdentityAwareProxyClient() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.IdentityAwareProxyClient.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_identity_aware_proxy_client(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_identity_aware_proxy_client_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_identity_aware_proxy_client._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "identityAwareProxyClient", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_identity_aware_proxy_client_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyOAuthServiceRestInterceptor(), + ) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_create_identity_aware_proxy_client", + ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "pre_create_identity_aware_proxy_client", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateIdentityAwareProxyClientRequest.pb( + service.CreateIdentityAwareProxyClientRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.IdentityAwareProxyClient.to_json( + service.IdentityAwareProxyClient() + ) + + request = service.CreateIdentityAwareProxyClientRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.IdentityAwareProxyClient() + + client.create_identity_aware_proxy_client( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_identity_aware_proxy_client_rest_bad_request( + transport: str = "rest", request_type=service.CreateIdentityAwareProxyClientRequest +): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/brands/sample2"} + request_init["identity_aware_proxy_client"] = { + "name": "name_value", + "secret": "secret_value", + "display_name": "display_name_value", + } + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_identity_aware_proxy_client(request) + + +def test_create_identity_aware_proxy_client_rest_error(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListIdentityAwareProxyClientsRequest, + dict, + ], +) +def test_list_identity_aware_proxy_clients_rest(request_type): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/brands/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListIdentityAwareProxyClientsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListIdentityAwareProxyClientsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_identity_aware_proxy_clients(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListIdentityAwareProxyClientsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_identity_aware_proxy_clients_rest_required_fields( + request_type=service.ListIdentityAwareProxyClientsRequest, +): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_identity_aware_proxy_clients._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_identity_aware_proxy_clients._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
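+    # (Editorial note, hedged) For this GET method only the optional paging
+    # fields may remain unset; any other leftover field would mean a path or
+    # body parameter leaked into the query string.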
+    assert not set(unset_fields) - set(
+        (
+            "page_size",
+            "page_token",
+        )
+    )
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == "parent_value"
+
+    client = IdentityAwareProxyOAuthServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = service.ListIdentityAwareProxyClientsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = service.ListIdentityAwareProxyClientsResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_identity_aware_proxy_clients(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_identity_aware_proxy_clients_rest_unset_required_fields():
+    transport = transports.IdentityAwareProxyOAuthServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = (
+        transport.list_identity_aware_proxy_clients._get_unset_required_fields({})
+    )
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_identity_aware_proxy_clients_rest_interceptors(null_interceptor):
+    transport = transports.IdentityAwareProxyOAuthServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.IdentityAwareProxyOAuthServiceRestInterceptor(),
+    )
+    client = IdentityAwareProxyOAuthServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.IdentityAwareProxyOAuthServiceRestInterceptor,
+        "post_list_identity_aware_proxy_clients",
+    ) as post, mock.patch.object(
+        transports.IdentityAwareProxyOAuthServiceRestInterceptor,
+        "pre_list_identity_aware_proxy_clients",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = service.ListIdentityAwareProxyClientsRequest.pb(
+            service.ListIdentityAwareProxyClientsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = (
+            service.ListIdentityAwareProxyClientsResponse.to_json(
+                service.ListIdentityAwareProxyClientsResponse()
+            )
+        )
+
+        request = service.ListIdentityAwareProxyClientsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListIdentityAwareProxyClientsResponse()
+
+        client.list_identity_aware_proxy_clients(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_identity_aware_proxy_clients_rest_bad_request(
+    transport: str = "rest", request_type=service.ListIdentityAwareProxyClientsRequest
+):
+    client = IdentityAwareProxyOAuthServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/brands/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_identity_aware_proxy_clients(request)
+
+
+def test_list_identity_aware_proxy_clients_rest_pager(transport: str = "rest"):
+    client = IdentityAwareProxyOAuthServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + next_page_token="abc", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[], + next_page_token="def", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + ], + next_page_token="ghi", + ), + service.ListIdentityAwareProxyClientsResponse( + identity_aware_proxy_clients=[ + service.IdentityAwareProxyClient(), + service.IdentityAwareProxyClient(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListIdentityAwareProxyClientsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/brands/sample2"} + + pager = client.list_identity_aware_proxy_clients(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service.IdentityAwareProxyClient) for i in results) + + pages = list( + client.list_identity_aware_proxy_clients(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetIdentityAwareProxyClientRequest, + dict, + ], +) +def test_get_identity_aware_proxy_client_rest(request_type): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/brands/sample2/identityAwareProxyClients/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.IdentityAwareProxyClient.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_identity_aware_proxy_client(request) + + # Establish that the response is the type that we expect. 
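+    # (The assertions below simply round-trip the fields that were set
+    # on the designated return_value above.)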
+ assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +def test_get_identity_aware_proxy_client_rest_required_fields( + request_type=service.GetIdentityAwareProxyClientRequest, +): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_identity_aware_proxy_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_identity_aware_proxy_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.IdentityAwareProxyClient() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
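+            # (This stub stands in for path_template.transcode; for a GET
+            # there is no "body" key, so every field lands in query_params.)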
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.IdentityAwareProxyClient.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_identity_aware_proxy_client(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_identity_aware_proxy_client_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_identity_aware_proxy_client._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_identity_aware_proxy_client_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyOAuthServiceRestInterceptor(), + ) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_get_identity_aware_proxy_client", + ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "pre_get_identity_aware_proxy_client", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetIdentityAwareProxyClientRequest.pb( + service.GetIdentityAwareProxyClientRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.IdentityAwareProxyClient.to_json( + service.IdentityAwareProxyClient() + ) + + request = service.GetIdentityAwareProxyClientRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.IdentityAwareProxyClient() + + client.get_identity_aware_proxy_client( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_identity_aware_proxy_client_rest_bad_request( + transport: str = "rest", request_type=service.GetIdentityAwareProxyClientRequest +): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/brands/sample2/identityAwareProxyClients/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
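+    # (A 400 status on the mocked session should surface as
+    # core_exceptions.BadRequest, which pytest.raises checks below.)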
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_identity_aware_proxy_client(request) + + +def test_get_identity_aware_proxy_client_rest_error(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ResetIdentityAwareProxyClientSecretRequest, + dict, + ], +) +def test_reset_identity_aware_proxy_client_secret_rest(request_type): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/brands/sample2/identityAwareProxyClients/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.IdentityAwareProxyClient( + name="name_value", + secret="secret_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.IdentityAwareProxyClient.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reset_identity_aware_proxy_client_secret(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.IdentityAwareProxyClient) + assert response.name == "name_value" + assert response.secret == "secret_value" + assert response.display_name == "display_name_value" + + +def test_reset_identity_aware_proxy_client_secret_rest_required_fields( + request_type=service.ResetIdentityAwareProxyClientSecretRequest, +): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_identity_aware_proxy_client_secret._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_identity_aware_proxy_client_secret._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.IdentityAwareProxyClient() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
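+            # (Unlike the GET stubs above, this POST stub also attaches a
+            # "body" entry immediately below.)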
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.IdentityAwareProxyClient.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reset_identity_aware_proxy_client_secret(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reset_identity_aware_proxy_client_secret_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.reset_identity_aware_proxy_client_secret._get_unset_required_fields( + {} + ) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reset_identity_aware_proxy_client_secret_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyOAuthServiceRestInterceptor(), + ) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_reset_identity_aware_proxy_client_secret", + ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "pre_reset_identity_aware_proxy_client_secret", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ResetIdentityAwareProxyClientSecretRequest.pb( + service.ResetIdentityAwareProxyClientSecretRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.IdentityAwareProxyClient.to_json( + service.IdentityAwareProxyClient() + ) + + request = service.ResetIdentityAwareProxyClientSecretRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.IdentityAwareProxyClient() + + client.reset_identity_aware_proxy_client_secret( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reset_identity_aware_proxy_client_secret_rest_bad_request( + transport: str = "rest", + request_type=service.ResetIdentityAwareProxyClientSecretRequest, +): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/brands/sample2/identityAwareProxyClients/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reset_identity_aware_proxy_client_secret(request) + + +def test_reset_identity_aware_proxy_client_secret_rest_error(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteIdentityAwareProxyClientRequest, + dict, + ], +) +def test_delete_identity_aware_proxy_client_rest(request_type): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/brands/sample2/identityAwareProxyClients/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_identity_aware_proxy_client(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_identity_aware_proxy_client_rest_required_fields( + request_type=service.DeleteIdentityAwareProxyClientRequest, +): + transport_class = transports.IdentityAwareProxyOAuthServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_identity_aware_proxy_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_identity_aware_proxy_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_identity_aware_proxy_client(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_identity_aware_proxy_client_rest_unset_required_fields(): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_identity_aware_proxy_client._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_identity_aware_proxy_client_rest_interceptors(null_interceptor): + transport = transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.IdentityAwareProxyOAuthServiceRestInterceptor(), + ) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "pre_delete_identity_aware_proxy_client", + ) as pre: + pre.assert_not_called() + pb_message = service.DeleteIdentityAwareProxyClientRequest.pb( + service.DeleteIdentityAwareProxyClientRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = service.DeleteIdentityAwareProxyClientRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_identity_aware_proxy_client( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_identity_aware_proxy_client_rest_bad_request( + transport: str = "rest", request_type=service.DeleteIdentityAwareProxyClientRequest +): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/brands/sample2/identityAwareProxyClients/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_identity_aware_proxy_client(request) + + +def test_delete_identity_aware_proxy_client_rest_error(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.IdentityAwareProxyOAuthServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.IdentityAwareProxyOAuthServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IdentityAwareProxyOAuthServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.IdentityAwareProxyOAuthServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IdentityAwareProxyOAuthServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IdentityAwareProxyOAuthServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.IdentityAwareProxyOAuthServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IdentityAwareProxyOAuthServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.IdentityAwareProxyOAuthServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = IdentityAwareProxyOAuthServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.IdentityAwareProxyOAuthServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + transports.IdentityAwareProxyOAuthServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
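+    # (google.auth.default is patched, so constructing each transport
+    # exercises the ADC lookup without touching the real environment.)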
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = IdentityAwareProxyOAuthServiceClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + ) + + +def test_identity_aware_proxy_o_auth_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.IdentityAwareProxyOAuthServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_identity_aware_proxy_o_auth_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.transports.IdentityAwareProxyOAuthServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.IdentityAwareProxyOAuthServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_brands", + "create_brand", + "get_brand", + "create_identity_aware_proxy_client", + "list_identity_aware_proxy_clients", + "get_identity_aware_proxy_client", + "reset_identity_aware_proxy_client_secret", + "delete_identity_aware_proxy_client", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_identity_aware_proxy_o_auth_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.transports.IdentityAwareProxyOAuthServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IdentityAwareProxyOAuthServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_identity_aware_proxy_o_auth_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
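+    # (_prep_wrapped_messages is stubbed out so only the credential
+    # resolution path runs.)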
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.iap_v1.services.identity_aware_proxy_o_auth_service.transports.IdentityAwareProxyOAuthServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IdentityAwareProxyOAuthServiceTransport() + adc.assert_called_once() + + +def test_identity_aware_proxy_o_auth_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + IdentityAwareProxyOAuthServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + ], +) +def test_identity_aware_proxy_o_auth_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + transports.IdentityAwareProxyOAuthServiceRestTransport, + ], +) +def test_identity_aware_proxy_o_auth_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.IdentityAwareProxyOAuthServiceGrpcTransport, grpc_helpers), + ( + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_identity_aware_proxy_o_auth_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
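+    # (Both google.auth.default and create_channel on the parametrized
+    # grpc_helpers module are patched, so the assertion below can
+    # inspect the exact channel options requested.)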
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "iap.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="iap.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + ], +) +def test_identity_aware_proxy_o_auth_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_identity_aware_proxy_o_auth_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.IdentityAwareProxyOAuthServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_identity_aware_proxy_o_auth_service_host_no_port(transport_name): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="iap.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "iap.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iap.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_identity_aware_proxy_o_auth_service_host_with_port(transport_name): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
client_options=client_options.ClientOptions(
+            api_endpoint="iap.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "iap.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://iap.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_identity_aware_proxy_o_auth_service_client_transport_session_collision(
+    transport_name,
+):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = IdentityAwareProxyOAuthServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = IdentityAwareProxyOAuthServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_brands._session
+    session2 = client2.transport.list_brands._session
+    assert session1 != session2
+    session1 = client1.transport.create_brand._session
+    session2 = client2.transport.create_brand._session
+    assert session1 != session2
+    session1 = client1.transport.get_brand._session
+    session2 = client2.transport.get_brand._session
+    assert session1 != session2
+    session1 = client1.transport.create_identity_aware_proxy_client._session
+    session2 = client2.transport.create_identity_aware_proxy_client._session
+    assert session1 != session2
+    session1 = client1.transport.list_identity_aware_proxy_clients._session
+    session2 = client2.transport.list_identity_aware_proxy_clients._session
+    assert session1 != session2
+    session1 = client1.transport.get_identity_aware_proxy_client._session
+    session2 = client2.transport.get_identity_aware_proxy_client._session
+    assert session1 != session2
+    session1 = client1.transport.reset_identity_aware_proxy_client_secret._session
+    session2 = client2.transport.reset_identity_aware_proxy_client_secret._session
+    assert session1 != session2
+    session1 = client1.transport.delete_identity_aware_proxy_client._session
+    session2 = client2.transport.delete_identity_aware_proxy_client._session
+    assert session1 != session2
+
+
+def test_identity_aware_proxy_o_auth_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.IdentityAwareProxyOAuthServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_identity_aware_proxy_o_auth_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
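+# (Both parametrized transports are expected to emit a
+# DeprecationWarning, which pytest.warns asserts below.)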
+@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + ], +) +def test_identity_aware_proxy_o_auth_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + ], +) +def test_identity_aware_proxy_o_auth_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = IdentityAwareProxyOAuthServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = IdentityAwareProxyOAuthServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
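+    # (parse_common_billing_account_path should recover exactly the
+    # kwargs that built the path.)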
+ actual = IdentityAwareProxyOAuthServiceClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = IdentityAwareProxyOAuthServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = IdentityAwareProxyOAuthServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyOAuthServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = IdentityAwareProxyOAuthServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = IdentityAwareProxyOAuthServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyOAuthServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = IdentityAwareProxyOAuthServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = IdentityAwareProxyOAuthServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = IdentityAwareProxyOAuthServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = IdentityAwareProxyOAuthServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = IdentityAwareProxyOAuthServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = IdentityAwareProxyOAuthServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.IdentityAwareProxyOAuthServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.IdentityAwareProxyOAuthServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = IdentityAwareProxyOAuthServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = IdentityAwareProxyOAuthServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = IdentityAwareProxyOAuthServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
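+        # (Exiting the client context manager should close the transport
+        # exactly once.)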
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + IdentityAwareProxyOAuthServiceClient, + transports.IdentityAwareProxyOAuthServiceGrpcTransport, + ), + ( + IdentityAwareProxyOAuthServiceAsyncClient, + transports.IdentityAwareProxyOAuthServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-ids/.OwlBot.yaml b/packages/google-cloud-ids/.OwlBot.yaml new file mode 100644 index 000000000000..9d611ec197ce --- /dev/null +++ b/packages/google-cloud-ids/.OwlBot.yaml @@ -0,0 +1,23 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/ids/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-ids/$1 + +begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/packages/google-cloud-ids/.coveragerc b/packages/google-cloud-ids/.coveragerc new file mode 100644 index 000000000000..7b943eb7bea7 --- /dev/null +++ b/packages/google-cloud-ids/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/ids/__init__.py + google/cloud/ids/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-ids/.flake8 b/packages/google-cloud-ids/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-ids/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-ids/.gitignore b/packages/google-cloud-ids/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-ids/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-ids/.repo-metadata.json b/packages/google-cloud-ids/.repo-metadata.json new file mode 100644 index 000000000000..4685f3880f96 --- /dev/null +++ b/packages/google-cloud-ids/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "ids", + "name_pretty": "Cloud IDS", + "product_documentation": "https://cloud.google.com/intrusion-detection-system/", + "client_documentation": "https://cloud.google.com/python/docs/reference/ids/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-ids", + "api_id": "ids.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "ids", + "api_description": "Cloud IDS is an intrusion detection service that provides threat detection for intrusions, malware, spyware, and command-and-control attacks on your network. Cloud IDS works by creating a Google-managed peered network with mirrored VMs. Traffic in the peered network is mirrored, and then inspected by Palo Alto Networks threat protection technologies to provide advanced threat detection." 
+} diff --git a/packages/google-cloud-ids/CHANGELOG.md b/packages/google-cloud-ids/CHANGELOG.md new file mode 100644 index 000000000000..cebe773abdb8 --- /dev/null +++ b/packages/google-cloud-ids/CHANGELOG.md @@ -0,0 +1,148 @@ +# Changelog + +## [1.5.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-ids-v1.5.0...google-cloud-ids-v1.5.1) (2023-04-21) + + +### Documentation + +* Update api description for `google-cloud-ids` ([23aeafc](https://github.com/googleapis/google-cloud-python/commit/23aeafc42c35c54a52f84e1425cc1c8a73300ba4)) + +## [1.5.0](https://github.com/googleapis/python-ids/compare/v1.4.1...v1.5.0) (2023-02-17) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#105](https://github.com/googleapis/python-ids/issues/105)) ([f51b4b3](https://github.com/googleapis/python-ids/commit/f51b4b3a317fc7fe05ed8524f79854e6c79f0aad)) + + +### Bug Fixes + +* Add service_yaml_parameters to py_gapic_library BUILD.bazel targets ([#107](https://github.com/googleapis/python-ids/issues/107)) ([d34c23c](https://github.com/googleapis/python-ids/commit/d34c23ccb20f522dfc04d6fe94a99fc3784e26aa)) + +## [1.4.1](https://github.com/googleapis/python-ids/compare/v1.4.0...v1.4.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([b6d8eab](https://github.com/googleapis/python-ids/commit/b6d8eab0d68cbde4eada3140eb07c37a89d09189)) + + +### Documentation + +* Add documentation for enums ([b6d8eab](https://github.com/googleapis/python-ids/commit/b6d8eab0d68cbde4eada3140eb07c37a89d09189)) + +## [1.4.0](https://github.com/googleapis/python-ids/compare/v1.3.0...v1.4.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#97](https://github.com/googleapis/python-ids/issues/97)) ([4d3ebb9](https://github.com/googleapis/python-ids/commit/4d3ebb91fade51bc6a38b974f12f0d4caa321b96)) + +## [1.3.0](https://github.com/googleapis/python-ids/compare/v1.2.4...v1.3.0) (2022-12-14) + + +### Features + +* Add support for `google.cloud.ids.__version__` ([fd8cfa4](https://github.com/googleapis/python-ids/commit/fd8cfa4a17c334407f31e7c2edea4ea52063b176)) +* Add typing to proto.Message based class attributes ([fd8cfa4](https://github.com/googleapis/python-ids/commit/fd8cfa4a17c334407f31e7c2edea4ea52063b176)) + + +### Bug Fixes + +* Add dict typing for client_options ([fd8cfa4](https://github.com/googleapis/python-ids/commit/fd8cfa4a17c334407f31e7c2edea4ea52063b176)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([3e1b599](https://github.com/googleapis/python-ids/commit/3e1b5991d965b52931ff613616f0ed65622efac0)) +* Drop usage of pkg_resources ([3e1b599](https://github.com/googleapis/python-ids/commit/3e1b5991d965b52931ff613616f0ed65622efac0)) +* Fix timeout default values ([3e1b599](https://github.com/googleapis/python-ids/commit/3e1b5991d965b52931ff613616f0ed65622efac0)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([fd8cfa4](https://github.com/googleapis/python-ids/commit/fd8cfa4a17c334407f31e7c2edea4ea52063b176)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([3e1b599](https://github.com/googleapis/python-ids/commit/3e1b5991d965b52931ff613616f0ed65622efac0)) + +## [1.2.4](https://github.com/googleapis/python-ids/compare/v1.2.3...v1.2.4) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#87](https://github.com/googleapis/python-ids/issues/87)) 
([c11db6b](https://github.com/googleapis/python-ids/commit/c11db6b87959459f64b6fdab7100aa3692584e44)) + +## [1.2.3](https://github.com/googleapis/python-ids/compare/v1.2.2...v1.2.3) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#85](https://github.com/googleapis/python-ids/issues/85)) ([4fdbbe8](https://github.com/googleapis/python-ids/commit/4fdbbe890d28e3999446e10dade2ec2c2ca00abd)) + +## [1.2.2](https://github.com/googleapis/python-ids/compare/v1.2.1...v1.2.2) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#70](https://github.com/googleapis/python-ids/issues/70)) ([3c4ea60](https://github.com/googleapis/python-ids/commit/3c4ea60727ced1be9f2dcee5ffe5c0c1f4851f95)) +* **deps:** require proto-plus >= 1.22.0 ([3c4ea60](https://github.com/googleapis/python-ids/commit/3c4ea60727ced1be9f2dcee5ffe5c0c1f4851f95)) + +## [1.2.1](https://github.com/googleapis/python-ids/compare/v1.2.0...v1.2.1) (2022-07-13) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#63](https://github.com/googleapis/python-ids/issues/63)) ([f664641](https://github.com/googleapis/python-ids/commit/f664641ebcc0aeb7031cc2169fdfe3b0da6d0604)) + +## [1.2.0](https://github.com/googleapis/python-ids/compare/v1.1.2...v1.2.0) (2022-07-12) + + +### Features + +* add audience parameter ([6f977e1](https://github.com/googleapis/python-ids/commit/6f977e1b2b9c8a1e721430fdd7a4abb6f00cbdf1)) + + +### Bug Fixes + +* **deps:** require google-api-core >= 2.8.0 ([#59](https://github.com/googleapis/python-ids/issues/59)) ([6f977e1](https://github.com/googleapis/python-ids/commit/6f977e1b2b9c8a1e721430fdd7a4abb6f00cbdf1)) +* require python 3.7+ ([#61](https://github.com/googleapis/python-ids/issues/61)) ([62079cc](https://github.com/googleapis/python-ids/commit/62079ccf399b78a4da7af94337a099732872ce98)) + +## [1.1.2](https://github.com/googleapis/python-ids/compare/v1.1.1...v1.1.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#52](https://github.com/googleapis/python-ids/issues/52)) ([cde8a52](https://github.com/googleapis/python-ids/commit/cde8a52a5152df0d7f2858ab4733769a024eb9aa)) + + +### Documentation + +* fix changelog header to consistent size ([#51](https://github.com/googleapis/python-ids/issues/51)) ([24267af](https://github.com/googleapis/python-ids/commit/24267afe4003f33c2f7505ac69c23f352136c55a)) + +## [1.1.1](https://github.com/googleapis/python-ids/compare/v1.1.0...v1.1.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#29](https://github.com/googleapis/python-ids/issues/29)) ([b006c4d](https://github.com/googleapis/python-ids/commit/b006c4d9cc4fb9983aa901332db4f1247eea6900)) + +## [1.1.0](https://github.com/googleapis/python-ids/compare/v1.0.0...v1.1.0) (2022-02-26) + + +### Features + +* add api key support ([#15](https://github.com/googleapis/python-ids/issues/15)) ([8e562db](https://github.com/googleapis/python-ids/commit/8e562db176932dd9413e10ada79d8460b647a56f)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([31be484](https://github.com/googleapis/python-ids/commit/31be484920c0cedec5ede6e97e068b1b113f1ee4)) + +## [1.0.0](https://github.com/googleapis/python-ids/compare/v0.1.0...v1.0.0) (2022-01-24) + + +### Features + +* bump release level to production/stable ([#5](https://github.com/googleapis/python-ids/issues/5)) ([ad90dd9](https://github.com/googleapis/python-ids/commit/ad90dd9e6064d2eb8504f38df2aa1f882b516459)) + +## 0.1.0 
(2021-11-12) + + +### Features + +* generate v1 ([12a0363](https://www.github.com/googleapis/python-ids/commit/12a036387a20072cf8ab7999c360fac7989de788)) diff --git a/packages/google-cloud-ids/CODE_OF_CONDUCT.md b/packages/google-cloud-ids/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-ids/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. 
If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-ids/CONTRIBUTING.rst b/packages/google-cloud-ids/CONTRIBUTING.rst new file mode 100644 index 000000000000..265c274d1d90 --- /dev/null +++ b/packages/google-cloud-ids/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. 
E.g.::

+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox`_ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+   $ nox -s unit
+
+- To run a single unit test::
+
+   $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+   $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-<python-version> -- -k <name of test>
+
+
+  .. note::
+
+      System tests are only configured to run under Python 3.
+      For expediency, we do not run them in older versions of Python 3.
+
+   This alone will not run the tests. You'll need to change some local
+   auth settings and change some configuration in your project to
+   run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible.
See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_environments>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-ids
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+We also explicitly decided to support Python 3 beginning with version 3.7.
+Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+  works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+..
_Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-ids/LICENSE b/packages/google-cloud-ids/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-ids/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-ids/MANIFEST.in b/packages/google-cloud-ids/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-ids/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-ids/README.rst b/packages/google-cloud-ids/README.rst new file mode 100644 index 000000000000..55a1b7fea6eb --- /dev/null +++ b/packages/google-cloud-ids/README.rst @@ -0,0 +1,107 @@ +Python Client for Cloud IDS +=========================== + +|stable| |pypi| |versions| + +`Cloud IDS`_: Cloud IDS is an intrusion detection service that provides threat detection for intrusions, malware, spyware, and command-and-control attacks on your network. Cloud IDS works by creating a Google-managed peered network with mirrored VMs. Traffic in the peered network is mirrored, and then inspected by Palo Alto Networks threat protection technologies to provide advanced threat detection. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-ids.svg + :target: https://pypi.org/project/google-cloud-ids/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-ids.svg + :target: https://pypi.org/project/google-cloud-ids/ +.. 
_Cloud IDS: https://cloud.google.com/intrusion-detection-system/
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/ids/latest
+.. _Product Documentation: https://cloud.google.com/intrusion-detection-system/
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Cloud IDS.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Cloud IDS.: https://cloud.google.com/intrusion-detection-system/
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-ids
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-ids
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Cloud IDS
+  to see other available methods on the client.
+- Read the `Cloud IDS Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Cloud IDS Product documentation: https://cloud.google.com/intrusion-detection-system/
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-ids/SECURITY.md b/packages/google-cloud-ids/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-ids/SECURITY.md @@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and we coordinate disclosure on GitHub, using a GitHub Security Advisory to privately discuss and fix the issue.
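For a concrete starting point beyond the Quick Start above, here is a minimal usage sketch against the generated surface that appears later in this diff (``IDSClient`` and its ``list_endpoints`` method). The flattened ``parent`` keyword and the ``projects/*/locations/*`` parent format are assumptions based on standard GAPIC conventions, and the project and region values are placeholders:

.. code-block:: python

    from google.cloud import ids_v1

    # Uses Application Default Credentials; see "Setup Authentication" above.
    client = ids_v1.IDSClient()

    # List Cloud IDS endpoints in one location (placeholder project/region).
    parent = "projects/my-project/locations/us-central1"
    for endpoint in client.list_endpoints(parent=parent):
        print(endpoint.name)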
diff --git a/packages/google-cloud-ids/docs/CHANGELOG.md b/packages/google-cloud-ids/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-ids/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-ids/docs/README.rst b/packages/google-cloud-ids/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-ids/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-ids/docs/_static/custom.css b/packages/google-cloud-ids/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-ids/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-ids/docs/_templates/layout.html b/packages/google-cloud-ids/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-ids/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
<div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+            <div class="related top">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+            <div class="related bottom">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="sidebar-bottom">
+    </div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-ids/docs/conf.py b/packages/google-cloud-ids/docs/conf.py new file mode 100644 index 000000000000..7ceac2a4092e --- /dev/null +++ b/packages/google-cloud-ids/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-ids documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-ids" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-ids", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-ids-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-ids.tex", + "google-cloud-ids Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. 
+# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-ids", + "google-cloud-ids Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-ids", + "google-cloud-ids Documentation", + author, + "google-cloud-ids", + "google-cloud-ids Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-ids/docs/ids_v1/ids.rst b/packages/google-cloud-ids/docs/ids_v1/ids.rst new file mode 100644 index 000000000000..1f7fcf3769f6 --- /dev/null +++ b/packages/google-cloud-ids/docs/ids_v1/ids.rst @@ -0,0 +1,10 @@ +IDS +--------------------- + +.. automodule:: google.cloud.ids_v1.services.ids + :members: + :inherited-members: + +.. automodule:: google.cloud.ids_v1.services.ids.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-ids/docs/ids_v1/services.rst b/packages/google-cloud-ids/docs/ids_v1/services.rst new file mode 100644 index 000000000000..ea1fde188f7b --- /dev/null +++ b/packages/google-cloud-ids/docs/ids_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Ids v1 API +==================================== +.. toctree:: + :maxdepth: 2 + + ids diff --git a/packages/google-cloud-ids/docs/ids_v1/types.rst b/packages/google-cloud-ids/docs/ids_v1/types.rst new file mode 100644 index 000000000000..336fc07d183a --- /dev/null +++ b/packages/google-cloud-ids/docs/ids_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Ids v1 API +================================= + +.. 
automodule:: google.cloud.ids_v1.types
+    :members:
+    :show-inheritance: diff --git a/packages/google-cloud-ids/docs/index.rst b/packages/google-cloud-ids/docs/index.rst new file mode 100644 index 000000000000..9bfb85d34d3c --- /dev/null +++ b/packages/google-cloud-ids/docs/index.rst @@ -0,0 +1,23 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    ids_v1/services
+    ids_v1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-ids`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG diff --git a/packages/google-cloud-ids/docs/multiprocessing.rst b/packages/google-cloud-ids/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-ids/docs/multiprocessing.rst @@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-ids/google/cloud/ids/__init__.py b/packages/google-cloud-ids/google/cloud/ids/__init__.py new file mode 100644 index 000000000000..d64d112f7dc8 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids/__init__.py @@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.cloud.ids import gapic_version as package_version
+
+__version__ = package_version.__version__
+
+
+from google.cloud.ids_v1.services.ids.async_client import IDSAsyncClient
+from google.cloud.ids_v1.services.ids.client import IDSClient
+from google.cloud.ids_v1.types.ids import (
+    CreateEndpointRequest,
+    DeleteEndpointRequest,
+    Endpoint,
+    GetEndpointRequest,
+    ListEndpointsRequest,
+    ListEndpointsResponse,
+    OperationMetadata,
+)
+
+__all__ = (
+    "IDSClient",
+    "IDSAsyncClient",
+    "CreateEndpointRequest",
+    "DeleteEndpointRequest",
+    "Endpoint",
+    "GetEndpointRequest",
+    "ListEndpointsRequest",
+    "ListEndpointsResponse",
+    "OperationMetadata",
+) diff --git a/packages/google-cloud-ids/google/cloud/ids/gapic_version.py b/packages/google-cloud-ids/google/cloud/ids/gapic_version.py new file mode 100644 index 000000000000..69ff013987c9 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids/gapic_version.py @@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.5.1" # {x-release-please-version} diff --git a/packages/google-cloud-ids/google/cloud/ids/py.typed b/packages/google-cloud-ids/google/cloud/ids/py.typed new file mode 100644 index 000000000000..9952b3352f6f --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-ids package uses inline types. diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/__init__.py b/packages/google-cloud-ids/google/cloud/ids_v1/__init__.py new file mode 100644 index 000000000000..24b352cbf0d2 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/__init__.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.ids_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.ids import IDSAsyncClient, IDSClient +from .types.ids import ( + CreateEndpointRequest, + DeleteEndpointRequest, + Endpoint, + GetEndpointRequest, + ListEndpointsRequest, + ListEndpointsResponse, + OperationMetadata, +) + +__all__ = ( + "IDSAsyncClient", + "CreateEndpointRequest", + "DeleteEndpointRequest", + "Endpoint", + "GetEndpointRequest", + "IDSClient", + "ListEndpointsRequest", + "ListEndpointsResponse", + "OperationMetadata", +) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/gapic_metadata.json b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_metadata.json new file mode 100644 index 000000000000..01791ccb9f97 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_metadata.json @@ -0,0 +1,88 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.ids_v1", + "protoPackage": "google.cloud.ids.v1", + "schema": "1.0", + "services": { + "IDS": { + "clients": { + "grpc": { + "libraryClient": "IDSClient", + "rpcs": { + "CreateEndpoint": { + "methods": [ + "create_endpoint" + ] + }, + "DeleteEndpoint": { + "methods": [ + "delete_endpoint" + ] + }, + "GetEndpoint": { + "methods": [ + "get_endpoint" + ] + }, + "ListEndpoints": { + "methods": [ + "list_endpoints" + ] + } + } + }, + "grpc-async": { + "libraryClient": "IDSAsyncClient", + "rpcs": { + "CreateEndpoint": { + "methods": [ + "create_endpoint" + ] + }, + "DeleteEndpoint": { + "methods": [ + "delete_endpoint" + ] + }, + "GetEndpoint": { + "methods": [ + "get_endpoint" + ] + }, + "ListEndpoints": { + "methods": [ + "list_endpoints" + ] + } + } + }, + "rest": { + "libraryClient": "IDSClient", + "rpcs": { + "CreateEndpoint": { + "methods": [ + "create_endpoint" + ] + }, + "DeleteEndpoint": { + "methods": [ + "delete_endpoint" + ] + }, + "GetEndpoint": { + "methods": [ + "get_endpoint" + ] + }, + "ListEndpoints": { + "methods": [ + "list_endpoints" + ] + } + } + } + } + } + } +} diff --git 
a/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py new file mode 100644 index 000000000000..69ff013987c9 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.5.1" # {x-release-please-version} diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/py.typed b/packages/google-cloud-ids/google/cloud/ids_v1/py.typed new file mode 100644 index 000000000000..9952b3352f6f --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-ids package uses inline types. diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/__init__.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/__init__.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/__init__.py new file mode 100644 index 000000000000..a1ea5e1a09ec --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
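+# A minimal transport-selection sketch (illustrative, not part of the
+# generated module): the gapic_metadata.json above maps each RPC to the
+# same method names on IDSClient (grpc and rest transports) and
+# IDSAsyncClient (grpc-asyncio), so the transport is chosen at
+# construction time. The "rest" option matches the 1.5.0 changelog entry
+# enabling REST for this library; the defaults noted below are
+# assumptions based on standard GAPIC conventions.
+#
+#     from google.cloud import ids_v1
+#
+#     grpc_client = ids_v1.IDSClient()                  # default transport: "grpc"
+#     rest_client = ids_v1.IDSClient(transport="rest")  # REST with numeric enums
+#     async_client = ids_v1.IDSAsyncClient()            # uses "grpc_asyncio"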
+# +from .async_client import IDSAsyncClient +from .client import IDSClient + +__all__ = ( + "IDSClient", + "IDSAsyncClient", +) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/async_client.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/async_client.py new file mode 100644 index 000000000000..94dbdf1f1007 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/async_client.py @@ -0,0 +1,722 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.ids_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.ids_v1.services.ids import pagers +from google.cloud.ids_v1.types import ids + +from .client import IDSClient +from .transports.base import DEFAULT_CLIENT_INFO, IDSTransport +from .transports.grpc_asyncio import IDSGrpcAsyncIOTransport + + +class IDSAsyncClient: + """The IDS Service""" + + _client: IDSClient + + DEFAULT_ENDPOINT = IDSClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = IDSClient.DEFAULT_MTLS_ENDPOINT + + endpoint_path = staticmethod(IDSClient.endpoint_path) + parse_endpoint_path = staticmethod(IDSClient.parse_endpoint_path) + common_billing_account_path = staticmethod(IDSClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + IDSClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(IDSClient.common_folder_path) + parse_common_folder_path = staticmethod(IDSClient.parse_common_folder_path) + common_organization_path = staticmethod(IDSClient.common_organization_path) + parse_common_organization_path = staticmethod( + IDSClient.parse_common_organization_path + ) + common_project_path = staticmethod(IDSClient.common_project_path) + parse_common_project_path = staticmethod(IDSClient.parse_common_project_path) + common_location_path = staticmethod(IDSClient.common_location_path) + parse_common_location_path = staticmethod(IDSClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an 
instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IDSAsyncClient: The constructed client. + """ + return IDSClient.from_service_account_info.__func__(IDSAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IDSAsyncClient: The constructed client. + """ + return IDSClient.from_service_account_file.__func__(IDSAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return IDSClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> IDSTransport: + """Returns the transport used by the client instance. + + Returns: + IDSTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(IDSClient).get_transport_class, type(IDSClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, IDSTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the IDS client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.IDSTransport]): The + transport to use.
If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = IDSClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_endpoints( + self, + request: Optional[Union[ids.ListEndpointsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEndpointsAsyncPager: + r"""Lists Endpoints in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ids_v1 + + async def sample_list_endpoints(): + # Create a client + client = ids_v1.IDSAsyncClient() + + # Initialize request argument(s) + request = ids_v1.ListEndpointsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_endpoints(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.ids_v1.types.ListEndpointsRequest, dict]]): + The request object. + parent (:class:`str`): + Required. The parent, which owns this + collection of endpoints. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.ids_v1.services.ids.pagers.ListEndpointsAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
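+        # For example, with a hypothetical parent value, either call style is valid on its own: +        #     await client.list_endpoints(parent="projects/my-project/locations/us-central1-a") +        #     await client.list_endpoints(request={"parent": "projects/my-project/locations/us-central1-a"}) +        # Mixing ``request=`` with a flattened field raises the ValueError below.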
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = ids.ListEndpointsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_endpoints, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEndpointsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_endpoint( + self, + request: Optional[Union[ids.GetEndpointRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ids.Endpoint: + r"""Gets details of a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ids_v1 + + async def sample_get_endpoint(): + # Create a client + client = ids_v1.IDSAsyncClient() + + # Initialize request argument(s) + request = ids_v1.GetEndpointRequest( + name="name_value", + ) + + # Make the request + response = await client.get_endpoint(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.ids_v1.types.GetEndpointRequest, dict]]): + The request object. + name (:class:`str`): + Required. The name of the endpoint to retrieve. Format: + ``projects/{project}/locations/{location}/endpoints/{endpoint}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.ids_v1.types.Endpoint: + Endpoint describes a single IDS + endpoint. It defines a forwarding rule + to which packets can be sent for IDS + inspection. + + """ + # Create or coerce a protobuf request object. 
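+        # A plain dict is also accepted and coerced into a GetEndpointRequest +        # below, so (with a hypothetical endpoint name) these calls are equivalent: +        #     await client.get_endpoint(request=ids_v1.GetEndpointRequest(name="projects/p/locations/l/endpoints/e")) +        #     await client.get_endpoint(request={"name": "projects/p/locations/l/endpoints/e"})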
+        # Quick check: If we got a request object, we should *not* have +        # gotten any keyword arguments that map to the request. +        has_flattened_params = any([name]) +        if request is not None and has_flattened_params: +            raise ValueError( +                "If the `request` argument is set, then none of " +                "the individual field arguments should be set." +            ) + +        request = ids.GetEndpointRequest(request) + +        # If we have keyword arguments corresponding to fields on the +        # request, apply these. +        if name is not None: +            request.name = name + +        # Wrap the RPC method; this adds retry and timeout information, +        # and friendly error handling. +        rpc = gapic_v1.method_async.wrap_method( +            self._client._transport.get_endpoint, +            default_retry=retries.Retry( +                initial=0.25, +                maximum=32.0, +                multiplier=1.3, +                predicate=retries.if_exception_type( +                    core_exceptions.ServiceUnavailable, +                ), +                deadline=60.0, +            ), +            default_timeout=60.0, +            client_info=DEFAULT_CLIENT_INFO, +        ) + +        # Certain fields should be provided within the metadata header; +        # add these here. +        metadata = tuple(metadata) + ( +            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), +        ) + +        # Send the request. +        response = await rpc( +            request, +            retry=retry, +            timeout=timeout, +            metadata=metadata, +        ) + +        # Done; return the response. +        return response + +    async def create_endpoint( +        self, +        request: Optional[Union[ids.CreateEndpointRequest, dict]] = None, +        *, +        parent: Optional[str] = None, +        endpoint: Optional[ids.Endpoint] = None, +        endpoint_id: Optional[str] = None, +        retry: OptionalRetry = gapic_v1.method.DEFAULT, +        timeout: Union[float, object] = gapic_v1.method.DEFAULT, +        metadata: Sequence[Tuple[str, str]] = (), +    ) -> operation_async.AsyncOperation: +        r"""Creates a new Endpoint in a given project and +        location. + +        .. code-block:: python + +            # This snippet has been automatically generated and should be regarded as a +            # code template only. +            # It will require modifications to work: +            # - It may require correct/in-range values for request initialization. +            # - It may require specifying regional endpoints when creating the service +            # client as shown in: +            # https://googleapis.dev/python/google-api-core/latest/client_options.html +            from google.cloud import ids_v1 + +            async def sample_create_endpoint(): +                # Create a client +                client = ids_v1.IDSAsyncClient() + +                # Initialize request argument(s) +                endpoint = ids_v1.Endpoint() +                endpoint.network = "network_value" +                endpoint.severity = "CRITICAL" + +                request = ids_v1.CreateEndpointRequest( +                    parent="parent_value", +                    endpoint_id="endpoint_id_value", +                    endpoint=endpoint, +                ) + +                # Make the request +                operation = await client.create_endpoint(request=request) + +                print("Waiting for operation to complete...") + +                response = await operation.result() + +                # Handle the response +                print(response) + +        Args: +            request (Optional[Union[google.cloud.ids_v1.types.CreateEndpointRequest, dict]]): +                The request object. +            parent (:class:`str`): +                Required. The endpoint's parent. +                This corresponds to the ``parent`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            endpoint (:class:`google.cloud.ids_v1.types.Endpoint`): +                Required. The endpoint to create. +                This corresponds to the ``endpoint`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            endpoint_id (:class:`str`): +                Required. The endpoint identifier. This will be part of
This value must start with + a lowercase letter followed by up to 62 lowercase + letters, numbers, or hyphens, and cannot end with a + hyphen. Values that do not match this pattern will + trigger an INVALID_ARGUMENT error. + + This corresponds to the ``endpoint_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.ids_v1.types.Endpoint` Endpoint describes a single IDS endpoint. It defines a forwarding rule to + which packets can be sent for IDS inspection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, endpoint, endpoint_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = ids.CreateEndpointRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if endpoint is not None: + request.endpoint = endpoint + if endpoint_id is not None: + request.endpoint_id = endpoint_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_endpoint, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + ids.Endpoint, + metadata_type=ids.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_endpoint( + self, + request: Optional[Union[ids.DeleteEndpointRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service +            # client as shown in: +            # https://googleapis.dev/python/google-api-core/latest/client_options.html +            from google.cloud import ids_v1 + +            async def sample_delete_endpoint(): +                # Create a client +                client = ids_v1.IDSAsyncClient() + +                # Initialize request argument(s) +                request = ids_v1.DeleteEndpointRequest( +                    name="name_value", +                ) + +                # Make the request +                operation = await client.delete_endpoint(request=request) + +                print("Waiting for operation to complete...") + +                response = await operation.result() + +                # Handle the response +                print(response) + +        Args: +            request (Optional[Union[google.cloud.ids_v1.types.DeleteEndpointRequest, dict]]): +                The request object. +            name (:class:`str`): +                Required. The name of the endpoint to +                delete. + +                This corresponds to the ``name`` field +                on the ``request`` instance; if ``request`` is provided, this +                should not be set. +            retry (google.api_core.retry.Retry): Designation of what errors, if any, +                should be retried. +            timeout (float): The timeout for this request. +            metadata (Sequence[Tuple[str, str]]): Strings which should be +                sent along with the request as metadata. + +        Returns: +            google.api_core.operation_async.AsyncOperation: +                An object representing a long-running operation. + +                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated +                   empty messages in your APIs. A typical example is to +                   use it as the request or the response type of an API +                   method. For instance: + +                      service Foo { +                         rpc Bar(google.protobuf.Empty) returns +                         (google.protobuf.Empty); + +                      } + +        """ +        # Create or coerce a protobuf request object. +        # Quick check: If we got a request object, we should *not* have +        # gotten any keyword arguments that map to the request. +        has_flattened_params = any([name]) +        if request is not None and has_flattened_params: +            raise ValueError( +                "If the `request` argument is set, then none of " +                "the individual field arguments should be set." +            ) + +        request = ids.DeleteEndpointRequest(request) + +        # If we have keyword arguments corresponding to fields on the +        # request, apply these. +        if name is not None: +            request.name = name + +        # Wrap the RPC method; this adds retry and timeout information, +        # and friendly error handling. +        rpc = gapic_v1.method_async.wrap_method( +            self._client._transport.delete_endpoint, +            default_timeout=3600.0, +            client_info=DEFAULT_CLIENT_INFO, +        ) + +        # Certain fields should be provided within the metadata header; +        # add these here. +        metadata = tuple(metadata) + ( +            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), +        ) + +        # Send the request. +        response = await rpc( +            request, +            retry=retry, +            timeout=timeout, +            metadata=metadata, +        ) + +        # Wrap the response in an operation future. +        response = operation_async.from_gapic( +            response, +            self._client._transport.operations_client, +            empty_pb2.Empty, +            metadata_type=ids.OperationMetadata, +        ) + +        # Done; return the response.
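+        # The returned value is an AsyncOperation future; a typical caller +        # pattern (hypothetical endpoint name) is: +        #     op = await client.delete_endpoint(name="projects/p/locations/l/endpoints/e") +        #     await op.result()  # waits until the LRO completes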
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IDSAsyncClient",) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py new file mode 100644 index 000000000000..2d5ece321f12 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py @@ -0,0 +1,948 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.ids_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.ids_v1.services.ids import pagers +from google.cloud.ids_v1.types import ids + +from .transports.base import DEFAULT_CLIENT_INFO, IDSTransport +from .transports.grpc import IDSGrpcTransport +from .transports.grpc_asyncio import IDSGrpcAsyncIOTransport +from .transports.rest import IDSRestTransport + + +class IDSClientMeta(type): + """Metaclass for the IDS client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[IDSTransport]] + _transport_registry["grpc"] = IDSGrpcTransport + _transport_registry["grpc_asyncio"] = IDSGrpcAsyncIOTransport + _transport_registry["rest"] = IDSRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[IDSTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class IDSClient(metaclass=IDSClientMeta): + """The IDS Service""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "ids.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IDSClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IDSClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> IDSTransport: + """Returns the transport used by the client instance. + + Returns: + IDSTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def endpoint_path( + project: str, + location: str, + endpoint: str, + ) -> str: + """Returns a fully-qualified endpoint string.""" + return "projects/{project}/locations/{location}/endpoints/{endpoint}".format( + project=project, + location=location, + endpoint=endpoint, + ) + + @staticmethod + def parse_endpoint_path(path: str) -> Dict[str, str]: + """Parses a endpoint path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/endpoints/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, IDSTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the ids client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, IDSTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, IDSTransport): + # transport is a IDSTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_endpoints( + self, + request: Optional[Union[ids.ListEndpointsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEndpointsPager: + r"""Lists Endpoints in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ids_v1 + + def sample_list_endpoints(): + # Create a client + client = ids_v1.IDSClient() + + # Initialize request argument(s) + request = ids_v1.ListEndpointsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.ids_v1.types.ListEndpointsRequest, dict]): + The request object. + parent (str): + Required. The parent, which owns this + collection of endpoints. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.ids_v1.services.ids.pagers.ListEndpointsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a ids.ListEndpointsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, ids.ListEndpointsRequest): + request = ids.ListEndpointsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEndpointsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_endpoint( + self, + request: Optional[Union[ids.GetEndpointRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ids.Endpoint: + r"""Gets details of a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ids_v1 + + def sample_get_endpoint(): + # Create a client + client = ids_v1.IDSClient() + + # Initialize request argument(s) + request = ids_v1.GetEndpointRequest( + name="name_value", + ) + + # Make the request + response = client.get_endpoint(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.ids_v1.types.GetEndpointRequest, dict]): + The request object. + name (str): + Required. The name of the endpoint to retrieve. Format: + ``projects/{project}/locations/{location}/endpoints/{endpoint}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.ids_v1.types.Endpoint: + Endpoint describes a single IDS + endpoint. It defines a forwarding rule + to which packets can be sent for IDS + inspection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a ids.GetEndpointRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, ids.GetEndpointRequest): + request = ids.GetEndpointRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_endpoint] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_endpoint( + self, + request: Optional[Union[ids.CreateEndpointRequest, dict]] = None, + *, + parent: Optional[str] = None, + endpoint: Optional[ids.Endpoint] = None, + endpoint_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Endpoint in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ids_v1 + + def sample_create_endpoint(): + # Create a client + client = ids_v1.IDSClient() + + # Initialize request argument(s) + endpoint = ids_v1.Endpoint() + endpoint.network = "network_value" + endpoint.severity = "CRITICAL" + + request = ids_v1.CreateEndpointRequest( + parent="parent_value", + endpoint_id="endpoint_id_value", + endpoint=endpoint, + ) + + # Make the request + operation = client.create_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.ids_v1.types.CreateEndpointRequest, dict]): + The request object. + parent (str): + Required. The endpoint's parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + endpoint (google.cloud.ids_v1.types.Endpoint): + Required. The endpoint to create. + This corresponds to the ``endpoint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + endpoint_id (str): + Required. The endpoint identifier. This will be part of + the endpoint's resource name. This value must start with + a lowercase letter followed by up to 62 lowercase + letters, numbers, or hyphens, and cannot end with a + hyphen. Values that do not match this pattern will + trigger an INVALID_ARGUMENT error. + + This corresponds to the ``endpoint_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.ids_v1.types.Endpoint` Endpoint describes a single IDS endpoint. It defines a forwarding rule to + which packets can be sent for IDS inspection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, endpoint, endpoint_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a ids.CreateEndpointRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, ids.CreateEndpointRequest): + request = ids.CreateEndpointRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if endpoint is not None: + request.endpoint = endpoint + if endpoint_id is not None: + request.endpoint_id = endpoint_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
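+        # The wrapped method carries the default retry/timeout configured on +        # the transport (the async counterpart above uses default_timeout=3600.0 +        # for this RPC), so ``timeout=`` here only overrides that default.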
+ rpc = self._transport._wrapped_methods[self._transport.create_endpoint] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + ids.Endpoint, + metadata_type=ids.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_endpoint( + self, + request: Optional[Union[ids.DeleteEndpointRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ids_v1 + + def sample_delete_endpoint(): + # Create a client + client = ids_v1.IDSClient() + + # Initialize request argument(s) + request = ids_v1.DeleteEndpointRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.ids_v1.types.DeleteEndpointRequest, dict]): + The request object. + name (str): + Required. The name of the endpoint to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a ids.DeleteEndpointRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
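+        # That is, a caller-supplied ids.DeleteEndpointRequest instance is used +        # as-is, while a dict such as {"name": "projects/p/locations/l/endpoints/e"} +        # (a hypothetical name) is coerced below.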
+ if not isinstance(request, ids.DeleteEndpointRequest): + request = ids.DeleteEndpointRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_endpoint] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=ids.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "IDSClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IDSClient",) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/pagers.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/pagers.py new file mode 100644 index 000000000000..5b79e97586f3 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.ids_v1.types import ids + + +class ListEndpointsPager: + """A pager for iterating through ``list_endpoints`` requests. + + This class thinly wraps an initial + :class:`google.cloud.ids_v1.types.ListEndpointsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``endpoints`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEndpoints`` requests and continue to iterate + through the ``endpoints`` field on the + corresponding responses. + + All the usual :class:`google.cloud.ids_v1.types.ListEndpointsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., ids.ListEndpointsResponse], + request: ids.ListEndpointsRequest, + response: ids.ListEndpointsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.ids_v1.types.ListEndpointsRequest): + The initial request object. + response (google.cloud.ids_v1.types.ListEndpointsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = ids.ListEndpointsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[ids.ListEndpointsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[ids.Endpoint]: + for page in self.pages: + yield from page.endpoints + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEndpointsAsyncPager: + """A pager for iterating through ``list_endpoints`` requests. + + This class thinly wraps an initial + :class:`google.cloud.ids_v1.types.ListEndpointsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``endpoints`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEndpoints`` requests and continue to iterate + through the ``endpoints`` field on the + corresponding responses. + + All the usual :class:`google.cloud.ids_v1.types.ListEndpointsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[ids.ListEndpointsResponse]], + request: ids.ListEndpointsRequest, + response: ids.ListEndpointsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.ids_v1.types.ListEndpointsRequest): + The initial request object. + response (google.cloud.ids_v1.types.ListEndpointsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
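The ``pages`` generator above chases ``next_page_token`` until it is empty, so callers rarely need to handle paging themselves. A hedged sketch, assuming default credentials and a hypothetical parent:

.. code-block:: python

    from google.cloud import ids_v1

    client = ids_v1.IDSClient()
    pager = client.list_endpoints(
        request=ids_v1.ListEndpointsRequest(
            parent="projects/my-project/locations/us-central1-a"
        )
    )

    # __iter__ walks every page transparently, one endpoint at a time.
    for endpoint in pager:
        print(endpoint.name)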
+ """ + self._method = method + self._request = ids.ListEndpointsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[ids.ListEndpointsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[ids.Endpoint]: + async def async_generator(): + async for page in self.pages: + for response in page.endpoints: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/__init__.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/__init__.py new file mode 100644 index 000000000000..5f0004bf68b6 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import IDSTransport +from .grpc import IDSGrpcTransport +from .grpc_asyncio import IDSGrpcAsyncIOTransport +from .rest import IDSRestInterceptor, IDSRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[IDSTransport]] +_transport_registry["grpc"] = IDSGrpcTransport +_transport_registry["grpc_asyncio"] = IDSGrpcAsyncIOTransport +_transport_registry["rest"] = IDSRestTransport + +__all__ = ( + "IDSTransport", + "IDSGrpcTransport", + "IDSGrpcAsyncIOTransport", + "IDSRestTransport", + "IDSRestInterceptor", +) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/base.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/base.py new file mode 100644 index 000000000000..139931896bc5 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/base.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
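The transport registry above is what lets callers select an implementation by name. ``_transport_registry`` is module-private and shown here only for illustration; the supported path is to hand the name to the client:

.. code-block:: python

    from google.cloud import ids_v1
    from google.cloud.ids_v1.services.ids import transports

    # Direct lookup (illustrative only; the attribute is private).
    transport_cls = transports._transport_registry["rest"]  # IDSRestTransport

    # The supported path: let the client resolve the name itself.
    client = ids_v1.IDSClient(transport="rest")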
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.ids_v1 import gapic_version as package_version +from google.cloud.ids_v1.types import ids + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class IDSTransport(abc.ABC): + """Abstract transport class for IDS.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "ids.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
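The constructor above resolves credentials in a fixed order: an explicit ``credentials_file``, then explicit ``credentials``, then Application Default Credentials; supplying both explicit forms raises ``DuplicateCredentialArgs``. A sketch of the ADC path, assuming a configured environment:

.. code-block:: python

    import google.auth

    from google.cloud import ids_v1

    # Application Default Credentials with the transport's default scope.
    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    client = ids_v1.IDSClient(credentials=credentials)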
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_endpoints: gapic_v1.method.wrap_method( + self.list_endpoints, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_endpoint: gapic_v1.method.wrap_method( + self.get_endpoint, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_endpoint: gapic_v1.method.wrap_method( + self.create_endpoint, + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_endpoint: gapic_v1.method.wrap_method( + self.delete_endpoint, + default_timeout=3600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_endpoints( + self, + ) -> Callable[ + [ids.ListEndpointsRequest], + Union[ids.ListEndpointsResponse, Awaitable[ids.ListEndpointsResponse]], + ]: + raise NotImplementedError() + + @property + def get_endpoint( + self, + ) -> Callable[ + [ids.GetEndpointRequest], Union[ids.Endpoint, Awaitable[ids.Endpoint]] + ]: + raise NotImplementedError() + + @property + def create_endpoint( + self, + ) -> Callable[ + [ids.CreateEndpointRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_endpoint( + self, + ) -> Callable[ + [ids.DeleteEndpointRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("IDSTransport",) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/grpc.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/grpc.py new file mode 100644 index 000000000000..a0c3073e5764 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/grpc.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
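The defaults wired in by ``_prep_wrapped_methods`` above (exponential backoff on ``ServiceUnavailable`` with a 60-second deadline for the read methods) can be overridden per call. A sketch with a tighter policy, assuming a hypothetical parent:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import ids_v1

    client = ids_v1.IDSClient()

    # Same shape as the default policy above, with a shorter deadline.
    custom_retry = retries.Retry(
        initial=0.25,
        maximum=32.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=30.0,
    )
    response = client.list_endpoints(
        request=ids_v1.ListEndpointsRequest(
            parent="projects/my-project/locations/us-central1-a"
        ),
        retry=custom_retry,
        timeout=30.0,
    )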
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.ids_v1.types import ids + +from .base import DEFAULT_CLIENT_INFO, IDSTransport + + +class IDSGrpcTransport(IDSTransport): + """gRPC backend transport for IDS. + + The IDS Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "ids.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
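``client_cert_source_for_mtls`` expects a zero-argument callable returning PEM certificate and key bytes. A sketch with hypothetical file paths:

.. code-block:: python

    from google.cloud.ids_v1.services.ids.transports import IDSGrpcTransport

    def client_cert_source():
        # Hypothetical paths; returns (cert_bytes, key_bytes), both PEM.
        with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
            return cert.read(), key.read()

    transport = IDSGrpcTransport(client_cert_source_for_mtls=client_cert_source)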
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "ids.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_endpoints( + self, + ) -> Callable[[ids.ListEndpointsRequest], ids.ListEndpointsResponse]: + r"""Return a callable for the list endpoints method over gRPC. + + Lists Endpoints in a given project and location. + + Returns: + Callable[[~.ListEndpointsRequest], + ~.ListEndpointsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_endpoints" not in self._stubs: + self._stubs["list_endpoints"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/ListEndpoints", + request_serializer=ids.ListEndpointsRequest.serialize, + response_deserializer=ids.ListEndpointsResponse.deserialize, + ) + return self._stubs["list_endpoints"] + + @property + def get_endpoint(self) -> Callable[[ids.GetEndpointRequest], ids.Endpoint]: + r"""Return a callable for the get endpoint method over gRPC. + + Gets details of a single Endpoint. + + Returns: + Callable[[~.GetEndpointRequest], + ~.Endpoint]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_endpoint" not in self._stubs: + self._stubs["get_endpoint"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/GetEndpoint", + request_serializer=ids.GetEndpointRequest.serialize, + response_deserializer=ids.Endpoint.deserialize, + ) + return self._stubs["get_endpoint"] + + @property + def create_endpoint( + self, + ) -> Callable[[ids.CreateEndpointRequest], operations_pb2.Operation]: + r"""Return a callable for the create endpoint method over gRPC. + + Creates a new Endpoint in a given project and + location. + + Returns: + Callable[[~.CreateEndpointRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_endpoint" not in self._stubs: + self._stubs["create_endpoint"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/CreateEndpoint", + request_serializer=ids.CreateEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_endpoint"] + + @property + def delete_endpoint( + self, + ) -> Callable[[ids.DeleteEndpointRequest], operations_pb2.Operation]: + r"""Return a callable for the delete endpoint method over gRPC. + + Deletes a single Endpoint. + + Returns: + Callable[[~.DeleteEndpointRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_endpoint" not in self._stubs: + self._stubs["delete_endpoint"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/DeleteEndpoint", + request_serializer=ids.DeleteEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_endpoint"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("IDSGrpcTransport",) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/grpc_asyncio.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3d0d3bba0515 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/grpc_asyncio.py @@ -0,0 +1,361 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
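Each stub property above builds its ``unary_unary`` callable on first access and caches it in ``self._stubs``. Driving the transport directly bypasses the retry and timeout wrapping the client adds, so the following is illustration only, assuming default credentials:

.. code-block:: python

    from google.cloud.ids_v1.services.ids.transports import IDSGrpcTransport
    from google.cloud.ids_v1.types import ids

    transport = IDSGrpcTransport()
    rpc = transport.list_endpoints          # first access creates the stub
    assert rpc is transport.list_endpoints  # later accesses reuse the cache
    response = rpc(
        ids.ListEndpointsRequest(parent="projects/my-project/locations/us-central1-a")
    )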
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.ids_v1.types import ids + +from .base import DEFAULT_CLIENT_INFO, IDSTransport +from .grpc import IDSGrpcTransport + + +class IDSGrpcAsyncIOTransport(IDSTransport): + """gRPC AsyncIO backend transport for IDS. + + The IDS Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "ids.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "ids.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_endpoints( + self, + ) -> Callable[[ids.ListEndpointsRequest], Awaitable[ids.ListEndpointsResponse]]: + r"""Return a callable for the list endpoints method over gRPC. + + Lists Endpoints in a given project and location. + + Returns: + Callable[[~.ListEndpointsRequest], + Awaitable[~.ListEndpointsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_endpoints" not in self._stubs: + self._stubs["list_endpoints"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/ListEndpoints", + request_serializer=ids.ListEndpointsRequest.serialize, + response_deserializer=ids.ListEndpointsResponse.deserialize, + ) + return self._stubs["list_endpoints"] + + @property + def get_endpoint( + self, + ) -> Callable[[ids.GetEndpointRequest], Awaitable[ids.Endpoint]]: + r"""Return a callable for the get endpoint method over gRPC. + + Gets details of a single Endpoint. + + Returns: + Callable[[~.GetEndpointRequest], + Awaitable[~.Endpoint]]: + A function that, when called, will call the underlying RPC + on the server. 
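These awaitable stubs back the companion ``IDSAsyncClient`` (not shown in this hunk). A hedged sketch of end-to-end async paging, assuming that client and default credentials:

.. code-block:: python

    import asyncio

    from google.cloud import ids_v1

    async def main():
        client = ids_v1.IDSAsyncClient()
        pager = await client.list_endpoints(
            request=ids_v1.ListEndpointsRequest(
                parent="projects/my-project/locations/us-central1-a"
            )
        )
        async for endpoint in pager:  # drives ListEndpointsAsyncPager.__aiter__
            print(endpoint.name)

    asyncio.run(main())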
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_endpoint" not in self._stubs: + self._stubs["get_endpoint"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/GetEndpoint", + request_serializer=ids.GetEndpointRequest.serialize, + response_deserializer=ids.Endpoint.deserialize, + ) + return self._stubs["get_endpoint"] + + @property + def create_endpoint( + self, + ) -> Callable[[ids.CreateEndpointRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create endpoint method over gRPC. + + Creates a new Endpoint in a given project and + location. + + Returns: + Callable[[~.CreateEndpointRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_endpoint" not in self._stubs: + self._stubs["create_endpoint"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/CreateEndpoint", + request_serializer=ids.CreateEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_endpoint"] + + @property + def delete_endpoint( + self, + ) -> Callable[[ids.DeleteEndpointRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete endpoint method over gRPC. + + Deletes a single Endpoint. + + Returns: + Callable[[~.DeleteEndpointRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_endpoint" not in self._stubs: + self._stubs["delete_endpoint"] = self.grpc_channel.unary_unary( + "/google.cloud.ids.v1.IDS/DeleteEndpoint", + request_serializer=ids.DeleteEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_endpoint"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("IDSGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/rest.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/rest.py new file mode 100644 index 000000000000..f5da861b4553 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/rest.py @@ -0,0 +1,744 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.ids_v1.types import ids + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import IDSTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class IDSRestInterceptor: + """Interceptor for IDS. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the IDSRestTransport. + + .. code-block:: python + class MyCustomIDSInterceptor(IDSRestInterceptor): + def pre_create_endpoint(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_endpoint(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_endpoint(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_endpoint(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_endpoint(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_endpoint(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + transport = IDSRestTransport(interceptor=MyCustomIDSInterceptor()) + client = IDSClient(transport=transport) + + + """ + + def pre_create_endpoint( + self, request: ids.CreateEndpointRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[ids.CreateEndpointRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_endpoint + + Override in a subclass to manipulate the request or metadata + before they are sent to the IDS server. + """ + return request, metadata + + def post_create_endpoint( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_endpoint + + Override in a subclass to manipulate the response + after it is returned by the IDS server but before + it is returned to user code. 
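Beyond the logging shown in the class docstring, the pre-rpc hooks can rewrite outgoing metadata. A sketch that appends a hypothetical tracing header:

.. code-block:: python

    from google.cloud import ids_v1
    from google.cloud.ids_v1.services.ids.transports import (
        IDSRestInterceptor,
        IDSRestTransport,
    )

    class HeaderInterceptor(IDSRestInterceptor):
        def pre_delete_endpoint(self, request, metadata):
            # Append a hypothetical tracing header before the call goes out.
            return request, tuple(metadata) + (("x-example-trace", "demo"),)

    transport = IDSRestTransport(interceptor=HeaderInterceptor())
    client = ids_v1.IDSClient(transport=transport)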
+ """ + return response + + def pre_delete_endpoint( + self, request: ids.DeleteEndpointRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[ids.DeleteEndpointRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_endpoint + + Override in a subclass to manipulate the request or metadata + before they are sent to the IDS server. + """ + return request, metadata + + def post_delete_endpoint( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_endpoint + + Override in a subclass to manipulate the response + after it is returned by the IDS server but before + it is returned to user code. + """ + return response + + def pre_get_endpoint( + self, request: ids.GetEndpointRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[ids.GetEndpointRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_endpoint + + Override in a subclass to manipulate the request or metadata + before they are sent to the IDS server. + """ + return request, metadata + + def post_get_endpoint(self, response: ids.Endpoint) -> ids.Endpoint: + """Post-rpc interceptor for get_endpoint + + Override in a subclass to manipulate the response + after it is returned by the IDS server but before + it is returned to user code. + """ + return response + + def pre_list_endpoints( + self, request: ids.ListEndpointsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[ids.ListEndpointsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the IDS server. + """ + return request, metadata + + def post_list_endpoints( + self, response: ids.ListEndpointsResponse + ) -> ids.ListEndpointsResponse: + """Post-rpc interceptor for list_endpoints + + Override in a subclass to manipulate the response + after it is returned by the IDS server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class IDSRestStub: + _session: AuthorizedSession + _host: str + _interceptor: IDSRestInterceptor + + +class IDSRestTransport(IDSTransport): + """REST backend transport for IDS. + + The IDS Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "ids.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[IDSRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or IDSRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
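The ``http_options`` map above is what lets ``operation.result()`` poll ``GetOperation`` over REST. A sketch of the round trip, assuming hypothetical IDs and a reachable API:

.. code-block:: python

    from google.cloud import ids_v1

    client = ids_v1.IDSClient(transport="rest")
    operation = client.create_endpoint(
        request=ids_v1.CreateEndpointRequest(
            parent="projects/my-project/locations/us-central1-a",
            endpoint_id="my-endpoint",
            endpoint=ids_v1.Endpoint(
                network="projects/my-project/global/networks/default",
                severity=ids_v1.Endpoint.Severity.INFORMATIONAL,
            ),
        )
    )
    # result() polls GetOperation through the REST operations client above.
    endpoint = operation.result(timeout=3600)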
+ return self._operations_client + + class _CreateEndpoint(IDSRestStub): + def __hash__(self): + return hash("CreateEndpoint") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "endpointId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ids.CreateEndpointRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create endpoint method over HTTP. + + Args: + request (~.ids.CreateEndpointRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/endpoints", + "body": "endpoint", + }, + ] + request, metadata = self._interceptor.pre_create_endpoint(request, metadata) + pb_request = ids.CreateEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_endpoint(resp) + return resp + + class _DeleteEndpoint(IDSRestStub): + def __hash__(self): + return hash("DeleteEndpoint") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ids.DeleteEndpointRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete endpoint method over HTTP. + + Args: + request (~.ids.DeleteEndpointRequest): + The request object. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/endpoints/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_endpoint(request, metadata) + pb_request = ids.DeleteEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_endpoint(resp) + return resp + + class _GetEndpoint(IDSRestStub): + def __hash__(self): + return hash("GetEndpoint") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ids.GetEndpointRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ids.Endpoint: + r"""Call the get endpoint method over HTTP. + + Args: + request (~.ids.GetEndpointRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.ids.Endpoint: + Endpoint describes a single IDS + endpoint. It defines a forwarding rule + to which packets can be sent for IDS + inspection. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/endpoints/*}", + }, + ] + request, metadata = self._interceptor.pre_get_endpoint(request, metadata) + pb_request = ids.GetEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ids.Endpoint() + pb_resp = ids.Endpoint.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_endpoint(resp) + return resp + + class _ListEndpoints(IDSRestStub): + def __hash__(self): + return hash("ListEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ids.ListEndpointsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ids.ListEndpointsResponse: + r"""Call the list endpoints method over HTTP. + + Args: + request (~.ids.ListEndpointsRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
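Each stub's ``__call__`` above relies on ``path_template.transcode`` to split a request between the URI path and the query string. A small illustration of that step in isolation, with a hypothetical parent:

.. code-block:: python

    from google.api_core import path_template

    from google.cloud.ids_v1.types import ids

    http_options = [
        {"method": "get", "uri": "/v1/{parent=projects/*/locations/*}/endpoints"},
    ]
    pb_request = ids.ListEndpointsRequest.pb(
        ids.ListEndpointsRequest(parent="projects/my-project/locations/us-central1-a")
    )
    transcoded = path_template.transcode(http_options, pb_request)
    # transcoded["uri"]    -> "/v1/projects/my-project/locations/us-central1-a/endpoints"
    # transcoded["method"] -> "get"; unmatched fields land in transcoded["query_params"]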
+ + Returns: + ~.ids.ListEndpointsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/endpoints", + }, + ] + request, metadata = self._interceptor.pre_list_endpoints(request, metadata) + pb_request = ids.ListEndpointsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ids.ListEndpointsResponse() + pb_resp = ids.ListEndpointsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_endpoints(resp) + return resp + + @property + def create_endpoint( + self, + ) -> Callable[[ids.CreateEndpointRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEndpoint(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_endpoint( + self, + ) -> Callable[[ids.DeleteEndpointRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEndpoint(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_endpoint(self) -> Callable[[ids.GetEndpointRequest], ids.Endpoint]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEndpoint(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_endpoints( + self, + ) -> Callable[[ids.ListEndpointsRequest], ids.ListEndpointsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEndpoints(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("IDSRestTransport",) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/types/__init__.py b/packages/google-cloud-ids/google/cloud/ids_v1/types/__init__.py new file mode 100644 index 000000000000..9fb319c613e4 --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/types/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .ids import ( + CreateEndpointRequest, + DeleteEndpointRequest, + Endpoint, + GetEndpointRequest, + ListEndpointsRequest, + ListEndpointsResponse, + OperationMetadata, +) + +__all__ = ( + "CreateEndpointRequest", + "DeleteEndpointRequest", + "Endpoint", + "GetEndpointRequest", + "ListEndpointsRequest", + "ListEndpointsResponse", + "OperationMetadata", +) diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/types/ids.py b/packages/google-cloud-ids/google/cloud/ids_v1/types/ids.py new file mode 100644 index 000000000000..831ffd61e65b --- /dev/null +++ b/packages/google-cloud-ids/google/cloud/ids_v1/types/ids.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.ids.v1", + manifest={ + "Endpoint", + "ListEndpointsRequest", + "ListEndpointsResponse", + "GetEndpointRequest", + "CreateEndpointRequest", + "DeleteEndpointRequest", + "OperationMetadata", + }, +) + + +class Endpoint(proto.Message): + r"""Endpoint describes a single IDS endpoint. It defines a + forwarding rule to which packets can be sent for IDS inspection. + + Attributes: + name (str): + Output only. The name of the endpoint. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The create time timestamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The update time timestamp. + labels (MutableMapping[str, str]): + The labels of the endpoint. + network (str): + Required. The fully qualified URL of the + network to which the IDS Endpoint is attached. + endpoint_forwarding_rule (str): + Output only. The fully qualified URL of the + endpoint's ILB Forwarding Rule. + endpoint_ip (str): + Output only. The IP address of the IDS + Endpoint's ILB. + description (str): + User-provided description of the endpoint + severity (google.cloud.ids_v1.types.Endpoint.Severity): + Required. Lowest threat severity that this + endpoint will alert on. + state (google.cloud.ids_v1.types.Endpoint.State): + Output only. Current state of the endpoint. + traffic_logs (bool): + Whether the endpoint should report traffic + logs in addition to threat logs. + """ + + class Severity(proto.Enum): + r"""Threat severity levels. + + Values: + SEVERITY_UNSPECIFIED (0): + Not set. + INFORMATIONAL (1): + Informational alerts. + LOW (2): + Low severity alerts. + MEDIUM (3): + Medium severity alerts. 
+ HIGH (4): + High severity alerts. + CRITICAL (5): + Critical severity alerts. + """ + SEVERITY_UNSPECIFIED = 0 + INFORMATIONAL = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + CRITICAL = 5 + + class State(proto.Enum): + r"""Endpoint state + + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + Being created. + READY (2): + Active and ready for traffic. + DELETING (3): + Being deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + network: str = proto.Field( + proto.STRING, + number=5, + ) + endpoint_forwarding_rule: str = proto.Field( + proto.STRING, + number=6, + ) + endpoint_ip: str = proto.Field( + proto.STRING, + number=7, + ) + description: str = proto.Field( + proto.STRING, + number=8, + ) + severity: Severity = proto.Field( + proto.ENUM, + number=9, + enum=Severity, + ) + state: State = proto.Field( + proto.ENUM, + number=12, + enum=State, + ) + traffic_logs: bool = proto.Field( + proto.BOOL, + number=13, + ) + + +class ListEndpointsRequest(proto.Message): + r""" + + Attributes: + parent (str): + Required. The parent, which owns this + collection of endpoints. + page_size (int): + Optional. The maximum number of endpoints to + return. The service may return fewer than this + value. + page_token (str): + Optional. A page token, received from a previous + ``ListEndpoints`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListEndpoints`` must match the call that provided the page + token. + filter (str): + Optional. The filter expression, following + the syntax outlined in + https://google.aip.dev/160. + order_by (str): + Optional. One or more fields to compare and + use to sort the output. See + https://google.aip.dev/132#ordering. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEndpointsResponse(proto.Message): + r""" + + Attributes: + endpoints (MutableSequence[google.cloud.ids_v1.types.Endpoint]): + The list of endpoints response. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + endpoints: MutableSequence["Endpoint"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Endpoint", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetEndpointRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The name of the endpoint to retrieve. 
Format:
+            ``projects/{project}/locations/{location}/endpoints/{endpoint}``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreateEndpointRequest(proto.Message):
+    r"""
+
+    Attributes:
+        parent (str):
+            Required. The endpoint's parent.
+        endpoint_id (str):
+            Required. The endpoint identifier. This will be part of the
+            endpoint's resource name. This value must start with a
+            lowercase letter followed by up to 62 lowercase letters,
+            numbers, or hyphens, and cannot end with a hyphen. Values
+            that do not match this pattern will trigger an
+            INVALID_ARGUMENT error.
+        endpoint (google.cloud.ids_v1.types.Endpoint):
+            Required. The endpoint to create.
+        request_id (str):
+            An optional request ID to identify requests.
+            Specify a unique request ID so that if you must
+            retry your request, the server will know to
+            ignore the request if it has already been
+            completed. The server will guarantee that for at
+            least 60 minutes after the first request.
+            For example, consider a situation where you make
+            an initial request and the request times out.
+            If you make the request again with the same
+            request ID, the server can check if the original
+            operation with the same request ID was received,
+            and if so, will ignore the second request. This
+            prevents clients from accidentally creating
+            duplicate commitments.
+            The request ID must be a valid UUID, with the
+            exception that the zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    endpoint_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    endpoint: "Endpoint" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="Endpoint",
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class DeleteEndpointRequest(proto.Message):
+    r"""
+
+    Attributes:
+        name (str):
+            Required. The name of the endpoint to delete.
+        request_id (str):
+            An optional request ID to identify requests.
+            Specify a unique request ID so that if you must
+            retry your request, the server will know to
+            ignore the request if it has already been
+            completed. The server will guarantee that for at
+            least 60 minutes after the first request.
+            For example, consider a situation where you make
+            an initial request and the request times out.
+            If you make the request again with the same
+            request ID, the server can check if the original
+            operation with the same request ID was received,
+            and if so, will ignore the second request. This
+            prevents clients from accidentally creating
+            duplicate commitments.
+            The request ID must be a valid UUID, with the
+            exception that the zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class OperationMetadata(proto.Message):
+    r"""Represents the metadata of the long-running operation.
+
+    Attributes:
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time the operation was
+            created.
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time the operation finished
+            running.
+        target (str):
+            Output only. Server-defined resource path for
+            the target of the operation.
+        verb (str):
+            Output only. Name of the verb executed by the
+            operation.
+        status_message (str):
+            Output only. Human-readable status of the
+            operation, if any.
+        requested_cancellation (bool):
+            Output only. 
Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-ids/mypy.ini b/packages/google-cloud-ids/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-ids/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-ids/noxfile.py b/packages/google-cloud-ids/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-ids/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. 
+ + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
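+
+    Example invocation (an illustrative usage note, not from the generated
+    file; it assumes a parametrized unit session has already produced
+    coverage data): ``nox -s unit-3.9 cover``.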
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-cloud-ids/renovate.json b/packages/google-cloud-ids/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-ids/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-ids/scripts/decrypt-secrets.sh b/packages/google-cloud-ids/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..21f6d2a26d90
--- /dev/null
+++ b/packages/google-cloud-ids/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-ids/scripts/fixup_ids_v1_keywords.py b/packages/google-cloud-ids/scripts/fixup_ids_v1_keywords.py new file mode 100644 index 000000000000..a9764e125f68 --- /dev/null +++ b/packages/google-cloud-ids/scripts/fixup_ids_v1_keywords.py @@ -0,0 +1,179 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class idsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_endpoint': ('parent', 'endpoint_id', 'endpoint', 'request_id', ), + 'delete_endpoint': ('name', 'request_id', ), + 'get_endpoint': ('name', ), + 'list_endpoints': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=idsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the ids client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-ids/scripts/readme-gen/readme_gen.py b/packages/google-cloud-ids/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..91b59676bfc7 --- /dev/null +++ b/packages/google-cloud-ids/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+    trim_blocks=True,
+    loader=jinja2.FileSystemLoader(
+        os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
+    ),
+    autoescape=True,
+)
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+    return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('source')
+    parser.add_argument('--destination', default='README.rst')
+
+    args = parser.parse_args()
+
+    source = os.path.abspath(args.source)
+    root = os.path.dirname(source)
+    destination = os.path.join(root, args.destination)
+
+    jinja_env.globals['get_help'] = get_help
+
+    with io.open(source, 'r') as f:
+        # safe_load: plain yaml.load requires an explicit Loader on modern
+        # PyYAML and can construct arbitrary Python objects.
+        config = yaml.safe_load(f)
+
+    # This allows get_help to execute in the right directory.
+    os.chdir(root)
+
+    output = README_TMPL.render(config)
+
+    with io.open(destination, 'w') as f:
+        f.write(output)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/packages/google-cloud-ids/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-ids/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 000000000000..4fd239765b0a
--- /dev/null
+++ b/packages/google-cloud-ids/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+   become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have the `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+    $ python {{sample.file}}
+{% if sample.show_help %}
+
+   {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+   https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+   https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+   https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-ids/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-ids/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000000..1446b94a5e3a
--- /dev/null
+++ b/packages/google-cloud-ids/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication set up. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+   https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-ids/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-ids/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-ids/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+   https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-cloud-ids/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-ids/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-cloud-ids/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+   .. code-block:: bash
+
+       $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+   .. code-block:: bash
+
+       $ virtualenv env
+       $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+   .. code-block:: bash
+
+       $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/packages/google-cloud-ids/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-ids/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000000..5ea33d18c00c
--- /dev/null
+++ b/packages/google-cloud-ids/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for
+cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+    brew install portaudio
+
+  **Note**: if you encounter an error when running `pip install` that indicates
+  it can't find `portaudio.h`, try running `pip install` with the following
+  flags::
+
+    pip install --global-option='build_ext' \
+        --global-option='-I/usr/local/include' \
+        --global-option='-L/usr/local/lib' \
+        pyaudio
+
+* For Debian / Ubuntu Linux::
+
+    apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+  installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+   https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/packages/google-cloud-ids/setup.cfg b/packages/google-cloud-ids/setup.cfg
new file mode 100644
index 000000000000..c3a2b39f6528
--- /dev/null
+++ b/packages/google-cloud-ids/setup.cfg
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[bdist_wheel]
+universal = 1
diff --git a/packages/google-cloud-ids/setup.py b/packages/google-cloud-ids/setup.py
new file mode 100644
index 000000000000..b0a3a821ab33
--- /dev/null
+++ b/packages/google-cloud-ids/setup.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-ids" + + +description = "Google Cloud Ids API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/ids/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-ids/testing/.gitignore b/packages/google-cloud-ids/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-ids/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-ids/testing/constraints-3.10.txt b/packages/google-cloud-ids/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-ids/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-ids/testing/constraints-3.11.txt b/packages/google-cloud-ids/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-ids/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
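+# (A note on convention, inferred from the sibling files: entries here are
+# deliberately left unpinned so tests run against the latest releases, while
+# constraints-3.7.txt pins exact lower-bound versions.)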
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-ids/testing/constraints-3.12.txt b/packages/google-cloud-ids/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-ids/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-ids/testing/constraints-3.7.txt b/packages/google-cloud-ids/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-ids/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-ids/testing/constraints-3.8.txt b/packages/google-cloud-ids/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-ids/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-ids/testing/constraints-3.9.txt b/packages/google-cloud-ids/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-ids/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-ids/tests/__init__.py b/packages/google-cloud-ids/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-ids/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-ids/tests/unit/__init__.py b/packages/google-cloud-ids/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-ids/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-ids/tests/unit/gapic/__init__.py b/packages/google-cloud-ids/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-ids/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/__init__.py b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py new file mode 100644 index 000000000000..25e2b262b534 --- /dev/null +++ b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py @@ -0,0 +1,3805 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.ids_v1.services.ids import ( + IDSAsyncClient, + IDSClient, + pagers, + transports, +) +from google.cloud.ids_v1.types import ids + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert IDSClient._get_default_mtls_endpoint(None) is None + assert IDSClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert IDSClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ( + IDSClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + ) + assert ( + IDSClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert IDSClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IDSClient, "grpc"), + (IDSAsyncClient, "grpc_asyncio"), + (IDSClient, "rest"), + ], +) +def test_ids_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "ids.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://ids.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.IDSGrpcTransport, "grpc"), + (transports.IDSGrpcAsyncIOTransport, 
"grpc_asyncio"), + (transports.IDSRestTransport, "rest"), + ], +) +def test_ids_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IDSClient, "grpc"), + (IDSAsyncClient, "grpc_asyncio"), + (IDSClient, "rest"), + ], +) +def test_ids_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "ids.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://ids.googleapis.com" + ) + + +def test_ids_client_get_transport_class(): + transport = IDSClient.get_transport_class() + available_transports = [ + transports.IDSGrpcTransport, + transports.IDSRestTransport, + ] + assert transport in available_transports + + transport = IDSClient.get_transport_class("grpc") + assert transport == transports.IDSGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (IDSClient, transports.IDSGrpcTransport, "grpc"), + (IDSAsyncClient, transports.IDSGrpcAsyncIOTransport, "grpc_asyncio"), + (IDSClient, transports.IDSRestTransport, "rest"), + ], +) +@mock.patch.object(IDSClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IDSClient)) +@mock.patch.object( + IDSAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IDSAsyncClient) +) +def test_ids_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(IDSClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(IDSClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
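+    # Only the strings "true" and "false" are accepted for this variable;
+    # anything else is expected to raise ValueError rather than be coerced.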
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (IDSClient, transports.IDSGrpcTransport, "grpc", "true"),
+        (IDSAsyncClient, transports.IDSGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+        (IDSClient, transports.IDSGrpcTransport, "grpc", "false"),
+        (IDSAsyncClient, transports.IDSGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+        (IDSClient, transports.IDSRestTransport, "rest", "true"),
+        (IDSClient, transports.IDSRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(IDSClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IDSClient))
+@mock.patch.object(
+    IDSAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IDSAsyncClient)
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_ids_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
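+    # In short: the mTLS endpoint and the client certificate are only used when
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE resolves to "true" AND a cert source is
+    # available, either passed explicitly or discovered through ADC.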
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [IDSClient, IDSAsyncClient]) +@mock.patch.object(IDSClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IDSClient)) +@mock.patch.object( + IDSAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IDSAsyncClient) +) +def test_ids_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
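+    # With the variable set to "true" and an explicit cert source supplied,
+    # both the user-provided endpoint and the cert source should come back
+    # unchanged from get_mtls_endpoint_and_cert_source().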
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (IDSClient, transports.IDSGrpcTransport, "grpc"), + (IDSAsyncClient, transports.IDSGrpcAsyncIOTransport, "grpc_asyncio"), + (IDSClient, transports.IDSRestTransport, "rest"), + ], +) +def test_ids_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
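+    # "1" and "2" are deliberately meaningless scope strings; the test only
+    # checks that they are forwarded verbatim to the transport constructor.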
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (IDSClient, transports.IDSGrpcTransport, "grpc", grpc_helpers), + ( + IDSAsyncClient, + transports.IDSGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (IDSClient, transports.IDSRestTransport, "rest", None), + ], +) +def test_ids_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_ids_client_client_options_from_dict(): + with mock.patch( + "google.cloud.ids_v1.services.ids.transports.IDSGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = IDSClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (IDSClient, transports.IDSGrpcTransport, "grpc", grpc_helpers), + ( + IDSAsyncClient, + transports.IDSGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_ids_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
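+    # Both google.auth.load_credentials_from_file and google.auth.default are
+    # mocked so we can assert that the file-based credentials, not ADC, end up
+    # on the channel created by grpc_helpers.create_channel.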
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "ids.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="ids.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ids.ListEndpointsRequest, + dict, + ], +) +def test_list_endpoints(request_type, transport: str = "grpc"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = ids.ListEndpointsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == ids.ListEndpointsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEndpointsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_endpoints_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: + client.list_endpoints() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == ids.ListEndpointsRequest() + + +@pytest.mark.asyncio +async def test_list_endpoints_async( + transport: str = "grpc_asyncio", request_type=ids.ListEndpointsRequest +): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: + # Designate an appropriate return value for the call. 
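+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the
+        # mocked stub behaves like an awaitable gRPC call.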
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + ids.ListEndpointsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == ids.ListEndpointsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEndpointsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_endpoints_async_from_dict(): + await test_list_endpoints_async(request_type=dict) + + +def test_list_endpoints_field_headers(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.ListEndpointsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: + call.return_value = ids.ListEndpointsResponse() + client.list_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_endpoints_field_headers_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.ListEndpointsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + ids.ListEndpointsResponse() + ) + await client.list_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_endpoints_flattened(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = ids.ListEndpointsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_endpoints( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
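+        # (Flattened keyword arguments are a convenience layer: the client
+        # packs them into a ListEndpointsRequest before invoking the transport,
+        # which is what the assertions below verify.)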
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_endpoints_flattened_error():
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_endpoints(
+            ids.ListEndpointsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_endpoints_flattened_async():
+    client = IDSAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = ids.ListEndpointsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            ids.ListEndpointsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_endpoints(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_endpoints_flattened_error_async():
+    client = IDSAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_endpoints(
+            ids.ListEndpointsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_endpoints_pager(transport_name: str = "grpc"):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+                next_page_token="abc",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[],
+                next_page_token="def",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                ],
+                next_page_token="ghi",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_endpoints(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, ids.Endpoint) for i in results)
+
+
+def test_list_endpoints_pages(transport_name: str = "grpc"):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call:
+        # Set the response to a series of pages.
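+        # Four pages are queued: tokens "abc", "def", "ghi" and finally an
+        # empty token, which tells the pager to stop; the trailing RuntimeError
+        # would only surface if a fifth page were requested by mistake.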
+        call.side_effect = (
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+                next_page_token="abc",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[],
+                next_page_token="def",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                ],
+                next_page_token="ghi",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_endpoints(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_endpoints_async_pager():
+    client = IDSAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_endpoints), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+                next_page_token="abc",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[],
+                next_page_token="def",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                ],
+                next_page_token="ghi",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_endpoints(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, ids.Endpoint) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_endpoints_async_pages():
+    client = IDSAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_endpoints), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+                next_page_token="abc",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[],
+                next_page_token="def",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                ],
+                next_page_token="ghi",
+            ),
+            ids.ListEndpointsResponse(
+                endpoints=[
+                    ids.Endpoint(),
+                    ids.Endpoint(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_endpoints(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        ids.GetEndpointRequest,
+        dict,
+    ],
+)
+def test_get_endpoint(request_type, transport: str = "grpc"):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call:
+        # Designate an appropriate return value for the call.
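+        # Every scalar field carries a distinctive sentinel ("name_value",
+        # "network_value", ...) so the assertions below can confirm that the
+        # response is unpacked field by field.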
+ call.return_value = ids.Endpoint( + name="name_value", + network="network_value", + endpoint_forwarding_rule="endpoint_forwarding_rule_value", + endpoint_ip="endpoint_ip_value", + description="description_value", + severity=ids.Endpoint.Severity.INFORMATIONAL, + state=ids.Endpoint.State.CREATING, + traffic_logs=True, + ) + response = client.get_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == ids.GetEndpointRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, ids.Endpoint) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.endpoint_forwarding_rule == "endpoint_forwarding_rule_value" + assert response.endpoint_ip == "endpoint_ip_value" + assert response.description == "description_value" + assert response.severity == ids.Endpoint.Severity.INFORMATIONAL + assert response.state == ids.Endpoint.State.CREATING + assert response.traffic_logs is True + + +def test_get_endpoint_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: + client.get_endpoint() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == ids.GetEndpointRequest() + + +@pytest.mark.asyncio +async def test_get_endpoint_async( + transport: str = "grpc_asyncio", request_type=ids.GetEndpointRequest +): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + ids.Endpoint( + name="name_value", + network="network_value", + endpoint_forwarding_rule="endpoint_forwarding_rule_value", + endpoint_ip="endpoint_ip_value", + description="description_value", + severity=ids.Endpoint.Severity.INFORMATIONAL, + state=ids.Endpoint.State.CREATING, + traffic_logs=True, + ) + ) + response = await client.get_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == ids.GetEndpointRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, ids.Endpoint) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.endpoint_forwarding_rule == "endpoint_forwarding_rule_value" + assert response.endpoint_ip == "endpoint_ip_value" + assert response.description == "description_value" + assert response.severity == ids.Endpoint.Severity.INFORMATIONAL + assert response.state == ids.Endpoint.State.CREATING + assert response.traffic_logs is True + + +@pytest.mark.asyncio +async def test_get_endpoint_async_from_dict(): + await test_get_endpoint_async(request_type=dict) + + +def test_get_endpoint_field_headers(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.GetEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: + call.return_value = ids.Endpoint() + client.get_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_endpoint_field_headers_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.GetEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ids.Endpoint()) + await client.get_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_endpoint_flattened(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = ids.Endpoint() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_endpoint_flattened_error(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_endpoint( + ids.GetEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_endpoint_flattened_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = ids.Endpoint() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ids.Endpoint()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_endpoint_flattened_error_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_endpoint( + ids.GetEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ids.CreateEndpointRequest, + dict, + ], +) +def test_create_endpoint(request_type, transport: str = "grpc"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == ids.CreateEndpointRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_endpoint_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: + client.create_endpoint() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == ids.CreateEndpointRequest() + + +@pytest.mark.asyncio +async def test_create_endpoint_async( + transport: str = "grpc_asyncio", request_type=ids.CreateEndpointRequest +): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
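+    # CreateEndpoint is a long-running operation: the stub returns an
+    # operations_pb2.Operation and the client wraps it in a future.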
+ with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == ids.CreateEndpointRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_endpoint_async_from_dict(): + await test_create_endpoint_async(request_type=dict) + + +def test_create_endpoint_field_headers(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.CreateEndpointRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_endpoint_field_headers_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.CreateEndpointRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_endpoint_flattened(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_endpoint( + parent="parent_value", + endpoint=ids.Endpoint(name="name_value"), + endpoint_id="endpoint_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].endpoint + mock_val = ids.Endpoint(name="name_value") + assert arg == mock_val + arg = args[0].endpoint_id + mock_val = "endpoint_id_value" + assert arg == mock_val + + +def test_create_endpoint_flattened_error(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_endpoint( + ids.CreateEndpointRequest(), + parent="parent_value", + endpoint=ids.Endpoint(name="name_value"), + endpoint_id="endpoint_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_endpoint_flattened_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_endpoint( + parent="parent_value", + endpoint=ids.Endpoint(name="name_value"), + endpoint_id="endpoint_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].endpoint + mock_val = ids.Endpoint(name="name_value") + assert arg == mock_val + arg = args[0].endpoint_id + mock_val = "endpoint_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_endpoint_flattened_error_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_endpoint( + ids.CreateEndpointRequest(), + parent="parent_value", + endpoint=ids.Endpoint(name="name_value"), + endpoint_id="endpoint_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ids.DeleteEndpointRequest, + dict, + ], +) +def test_delete_endpoint(request_type, transport: str = "grpc"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == ids.DeleteEndpointRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_endpoint_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: + client.delete_endpoint() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == ids.DeleteEndpointRequest() + + +@pytest.mark.asyncio +async def test_delete_endpoint_async( + transport: str = "grpc_asyncio", request_type=ids.DeleteEndpointRequest +): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == ids.DeleteEndpointRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_endpoint_async_from_dict(): + await test_delete_endpoint_async(request_type=dict) + + +def test_delete_endpoint_field_headers(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.DeleteEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_endpoint_field_headers_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ids.DeleteEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_endpoint(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_endpoint_flattened(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_endpoint_flattened_error(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_endpoint( + ids.DeleteEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_endpoint_flattened_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_endpoint_flattened_error_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_endpoint( + ids.DeleteEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ids.ListEndpointsRequest, + dict, + ], +) +def test_list_endpoints_rest(request_type): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
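+        # REST responses arrive as HTTP JSON, so the proto return value is
+        # serialized with json_format and placed on a requests.Response below.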
+ return_value = ids.ListEndpointsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ids.ListEndpointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_endpoints(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEndpointsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_endpoints_rest_required_fields(request_type=ids.ListEndpointsRequest): + transport_class = transports.IDSRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ids.ListEndpointsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
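+            # "v1/sample_method" is a stand-in URI; transcode() is mocked, so
+            # the test does not depend on the real HTTP rule for this method.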
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = ids.ListEndpointsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_endpoints(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_endpoints_rest_unset_required_fields():
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_endpoints._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "orderBy",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_endpoints_rest_interceptors(null_interceptor):
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.IDSRestInterceptor(),
+    )
+    client = IDSClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.IDSRestInterceptor, "post_list_endpoints"
+    ) as post, mock.patch.object(
+        transports.IDSRestInterceptor, "pre_list_endpoints"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = ids.ListEndpointsRequest.pb(ids.ListEndpointsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = ids.ListEndpointsResponse.to_json(
+            ids.ListEndpointsResponse()
+        )
+
+        request = ids.ListEndpointsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = ids.ListEndpointsResponse()
+
+        client.list_endpoints(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_endpoints_rest_bad_request(
+    transport: str = "rest", request_type=ids.ListEndpointsRequest
+):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_endpoints(request)
+
+
+def test_list_endpoints_rest_flattened():
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ids.ListEndpointsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ids.ListEndpointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/endpoints" % client.transport._host, + args[1], + ) + + +def test_list_endpoints_rest_flattened_error(transport: str = "rest"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_endpoints( + ids.ListEndpointsRequest(), + parent="parent_value", + ) + + +def test_list_endpoints_rest_pager(transport: str = "rest"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + ids.ListEndpointsResponse( + endpoints=[ + ids.Endpoint(), + ids.Endpoint(), + ids.Endpoint(), + ], + next_page_token="abc", + ), + ids.ListEndpointsResponse( + endpoints=[], + next_page_token="def", + ), + ids.ListEndpointsResponse( + endpoints=[ + ids.Endpoint(), + ], + next_page_token="ghi", + ), + ids.ListEndpointsResponse( + endpoints=[ + ids.Endpoint(), + ids.Endpoint(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(ids.ListEndpointsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_endpoints(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, ids.Endpoint) for i in results) + + pages = list(client.list_endpoints(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + ids.GetEndpointRequest, + dict, + ], +) +def test_get_endpoint_rest(request_type): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/endpoints/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ids.Endpoint( + name="name_value", + network="network_value", + endpoint_forwarding_rule="endpoint_forwarding_rule_value", + endpoint_ip="endpoint_ip_value", + description="description_value", + severity=ids.Endpoint.Severity.INFORMATIONAL, + state=ids.Endpoint.State.CREATING, + traffic_logs=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ids.Endpoint.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_endpoint(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, ids.Endpoint) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.endpoint_forwarding_rule == "endpoint_forwarding_rule_value" + assert response.endpoint_ip == "endpoint_ip_value" + assert response.description == "description_value" + assert response.severity == ids.Endpoint.Severity.INFORMATIONAL + assert response.state == ids.Endpoint.State.CREATING + assert response.traffic_logs is True + + +def test_get_endpoint_rest_required_fields(request_type=ids.GetEndpointRequest): + transport_class = transports.IDSRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ids.Endpoint() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
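+            # A canned transcode result keeps this test independent of the
+            # real http_options URI for this method.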
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = ids.Endpoint.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_endpoint(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_endpoint_rest_unset_required_fields():
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_endpoint._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_endpoint_rest_interceptors(null_interceptor):
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.IDSRestInterceptor(),
+    )
+    client = IDSClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.IDSRestInterceptor, "post_get_endpoint"
+    ) as post, mock.patch.object(
+        transports.IDSRestInterceptor, "pre_get_endpoint"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = ids.GetEndpointRequest.pb(ids.GetEndpointRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = ids.Endpoint.to_json(ids.Endpoint())
+
+        request = ids.GetEndpointRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = ids.Endpoint()
+
+        client.get_endpoint(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_endpoint_rest_bad_request(
+    transport: str = "rest", request_type=ids.GetEndpointRequest
+):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/endpoints/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_endpoint(request)
+
+
+def test_get_endpoint_rest_flattened():
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = ids.Endpoint() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/endpoints/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ids.Endpoint.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_endpoint(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/endpoints/*}" % client.transport._host, + args[1], + ) + + +def test_get_endpoint_rest_flattened_error(transport: str = "rest"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_endpoint( + ids.GetEndpointRequest(), + name="name_value", + ) + + +def test_get_endpoint_rest_error(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ids.CreateEndpointRequest, + dict, + ], +) +def test_create_endpoint_rest(request_type): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["endpoint"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "network": "network_value", + "endpoint_forwarding_rule": "endpoint_forwarding_rule_value", + "endpoint_ip": "endpoint_ip_value", + "description": "description_value", + "severity": 1, + "state": 1, + "traffic_logs": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_endpoint(request) + + # Establish that the response is the type that we expect. 
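+    # create_endpoint is a long-running operation, so the immediate response
+    # is an Operation handle rather than the Endpoint resource itself.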
+ assert response.operation.name == "operations/spam" + + +def test_create_endpoint_rest_required_fields(request_type=ids.CreateEndpointRequest): + transport_class = transports.IDSRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["endpoint_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "endpointId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "endpointId" in jsonified_request + assert jsonified_request["endpointId"] == request_init["endpoint_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["endpointId"] = "endpoint_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_endpoint._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "endpoint_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "endpointId" in jsonified_request + assert jsonified_request["endpointId"] == "endpoint_id_value" + + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_endpoint(request)
+
+            expected_params = [
+                (
+                    "endpointId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_endpoint_rest_unset_required_fields():
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_endpoint._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "endpointId",
+                "requestId",
+            )
+        )
+        & set(
+            (
+                "parent",
+                "endpointId",
+                "endpoint",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_endpoint_rest_interceptors(null_interceptor):
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.IDSRestInterceptor(),
+    )
+    client = IDSClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.IDSRestInterceptor, "post_create_endpoint"
+    ) as post, mock.patch.object(
+        transports.IDSRestInterceptor, "pre_create_endpoint"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = ids.CreateEndpointRequest.pb(ids.CreateEndpointRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = ids.CreateEndpointRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.create_endpoint(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_endpoint_rest_bad_request(
+    transport: str = "rest", request_type=ids.CreateEndpointRequest
+):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["endpoint"] = {
+        "name": "name_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "labels": {},
+        "network": "network_value",
+        "endpoint_forwarding_rule": "endpoint_forwarding_rule_value",
+        "endpoint_ip": "endpoint_ip_value",
+        "description": "description_value",
+        "severity": 1,
+        "state": 1,
+        "traffic_logs": True,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_endpoint(request) + + +def test_create_endpoint_rest_flattened(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + endpoint=ids.Endpoint(name="name_value"), + endpoint_id="endpoint_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_endpoint(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/endpoints" % client.transport._host, + args[1], + ) + + +def test_create_endpoint_rest_flattened_error(transport: str = "rest"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_endpoint( + ids.CreateEndpointRequest(), + parent="parent_value", + endpoint=ids.Endpoint(name="name_value"), + endpoint_id="endpoint_id_value", + ) + + +def test_create_endpoint_rest_error(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ids.DeleteEndpointRequest, + dict, + ], +) +def test_delete_endpoint_rest(request_type): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/endpoints/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_endpoint(request) + + # Establish that the response is the type that we expect. 
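+    # Like create_endpoint, delete_endpoint returns a long-running Operation
+    # handle; only its name is checked here.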
+ assert response.operation.name == "operations/spam" + + +def test_delete_endpoint_rest_required_fields(request_type=ids.DeleteEndpointRequest): + transport_class = transports.IDSRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_endpoint._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_endpoint(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_endpoint_rest_unset_required_fields():
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_endpoint._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId",)) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_endpoint_rest_interceptors(null_interceptor):
+    transport = transports.IDSRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.IDSRestInterceptor(),
+    )
+    client = IDSClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.IDSRestInterceptor, "post_delete_endpoint"
+    ) as post, mock.patch.object(
+        transports.IDSRestInterceptor, "pre_delete_endpoint"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = ids.DeleteEndpointRequest.pb(ids.DeleteEndpointRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = ids.DeleteEndpointRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.delete_endpoint(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_endpoint_rest_bad_request(
+    transport: str = "rest", request_type=ids.DeleteEndpointRequest
+):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/endpoints/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_endpoint(request)
+
+
+def test_delete_endpoint_rest_flattened():
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/endpoints/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_endpoint(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/endpoints/*}" % client.transport._host, + args[1], + ) + + +def test_delete_endpoint_rest_flattened_error(transport: str = "rest"): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_endpoint( + ids.DeleteEndpointRequest(), + name="name_value", + ) + + +def test_delete_endpoint_rest_error(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.IDSGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.IDSGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IDSClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.IDSGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IDSClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IDSClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.IDSGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IDSClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.IDSGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = IDSClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.IDSGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.IDSGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IDSGrpcTransport, + transports.IDSGrpcAsyncIOTransport, + transports.IDSRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = IDSClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.IDSGrpcTransport, + ) + + +def test_ids_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.IDSTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_ids_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.ids_v1.services.ids.transports.IDSTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.IDSTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_endpoints", + "get_endpoint", + "create_endpoint", + "delete_endpoint", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_ids_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.ids_v1.services.ids.transports.IDSTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IDSTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_ids_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.ids_v1.services.ids.transports.IDSTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IDSTransport() + adc.assert_called_once() + + +def test_ids_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + IDSClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IDSGrpcTransport, + transports.IDSGrpcAsyncIOTransport, + ], +) +def test_ids_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IDSGrpcTransport, + transports.IDSGrpcAsyncIOTransport, + transports.IDSRestTransport, + ], +) +def test_ids_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.IDSGrpcTransport, grpc_helpers), + (transports.IDSGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_ids_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "ids.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="ids.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", [transports.IDSGrpcTransport, transports.IDSGrpcAsyncIOTransport] +) +def test_ids_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
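+    # client_cert_source_callback returns (cert bytes, key bytes); the
+    # transport is expected to hand that pair to grpc.ssl_channel_credentials.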
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_ids_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.IDSRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_ids_rest_lro_client():
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_ids_host_no_port(transport_name):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint="ids.googleapis.com"),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "ids.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://ids.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_ids_host_with_port(transport_name):
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="ids.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "ids.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://ids.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_ids_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = IDSClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = IDSClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_endpoints._session
+    session2 = client2.transport.list_endpoints._session
+    assert session1 != session2
+    session1 = client1.transport.get_endpoint._session
+    session2 = client2.transport.get_endpoint._session
+    assert session1 != session2
+    session1 = client1.transport.create_endpoint._session
+    session2 = client2.transport.create_endpoint._session
+    assert session1 != session2
+    session1 = client1.transport.delete_endpoint._session
+    session2 = client2.transport.delete_endpoint._session
+    assert session1 != session2
+
+
+def test_ids_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.IDSGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_ids_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.IDSGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class", [transports.IDSGrpcTransport, transports.IDSGrpcAsyncIOTransport]
+)
+def test_ids_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
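+# This variant exercises mTLS channel creation when the SSL credentials come
+# from ADC (google.auth.transport.grpc.SslCredentials) rather than an explicit
+# client_cert_source.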
+@pytest.mark.parametrize(
+    "transport_class", [transports.IDSGrpcTransport, transports.IDSGrpcAsyncIOTransport]
+)
+def test_ids_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_ids_grpc_lro_client():
+    client = IDSClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_ids_grpc_lro_async_client():
+    client = IDSAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_endpoint_path():
+    project = "squid"
+    location = "clam"
+    endpoint = "whelk"
+    expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
+        project=project,
+        location=location,
+        endpoint=endpoint,
+    )
+    actual = IDSClient.endpoint_path(project, location, endpoint)
+    assert expected == actual
+
+
+def test_parse_endpoint_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "endpoint": "nudibranch",
+    }
+    path = IDSClient.endpoint_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = IDSClient.parse_endpoint_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "cuttlefish"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = IDSClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = IDSClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = IDSClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = IDSClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = IDSClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = IDSClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = IDSClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = IDSClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = IDSClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = IDSClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = IDSClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = IDSClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = IDSClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = IDSClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = IDSClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.IDSTransport, "_prep_wrapped_messages") as prep: + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.IDSTransport, "_prep_wrapped_messages") as prep: + transport_class = IDSClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = IDSAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = IDSClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (IDSClient, transports.IDSGrpcTransport), + (IDSAsyncClient, transports.IDSGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-rapidmigrationassessment/.OwlBot.yaml b/packages/google-cloud-rapidmigrationassessment/.OwlBot.yaml new file mode 100644 index 000000000000..01be8a356ffd --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/rapidmigrationassessment/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-rapidmigrationassessment/$1 diff --git a/packages/google-cloud-rapidmigrationassessment/.coveragerc b/packages/google-cloud-rapidmigrationassessment/.coveragerc new file mode 100644 index 000000000000..1d5128b1d21e --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/rapidmigrationassessment/__init__.py + google/cloud/rapidmigrationassessment/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-rapidmigrationassessment/.flake8 b/packages/google-cloud-rapidmigrationassessment/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-rapidmigrationassessment/.gitignore b/packages/google-cloud-rapidmigrationassessment/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-rapidmigrationassessment/.repo-metadata.json b/packages/google-cloud-rapidmigrationassessment/.repo-metadata.json new file mode 100644 index 000000000000..2d34b1f1d0dc --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "rapidmigrationassessment", + "name_pretty": "Rapid Migration Assessment API", + "api_description": "The Rapid Migration Assessment service is our first-party migration assessment and planning tool.", + "product_documentation": "https://cloud.google.com/migration-center/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/rapidmigrationassessment/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-rapidmigrationassessment", + "api_id": "rapidmigrationassessment.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "rapidmigrationassessment" +} diff --git a/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md b/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md new file mode 100644 index 000000000000..92d143ebb2ce --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2023-05-29) + + +### Features + +* add initial files for google.cloud.rapidmigrationassessment.v1 ([2687f0e](https://github.com/googleapis/google-cloud-python/commit/2687f0e5238e80c9cdf707f8d175fce13aeac970)) + +## Changelog diff --git a/packages/google-cloud-rapidmigrationassessment/CODE_OF_CONDUCT.md b/packages/google-cloud-rapidmigrationassessment/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-rapidmigrationassessment/CONTRIBUTING.rst b/packages/google-cloud-rapidmigrationassessment/CONTRIBUTING.rst new file mode 100644 index 000000000000..6fbaf7f1b791 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. 
Fork the repo, develop and test your code changes, add docs.
+#. Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+  documentation.
+
+- The feature must work fully on the following CPython versions:
+  3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-<python_version> -- -k <name of test>
+
+  .. note::
+
+      System tests are only configured to run under Python 3.
+      For expediency, we do not run them in older versions of Python 3.
+
+   This alone will not run the tests. You'll need to change some local
+   auth settings and change some configuration in your project to
+   run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
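[Editor's illustration, not part of the generated diff: a minimal ``noxfile.py`` for a new samples folder might look like the sketch below. This assumes pytest-based sample tests; the generated ``samples/snippets/noxfile.py`` template is more elaborate. The session name ``py`` matches the ``nox -s py-3.8`` invocations shown next.]::

    # noxfile.py: minimal sketch for a samples folder (illustrative only;
    # assumes the samples ship pytest-based tests).
    import nox


    @nox.session(python=["3.8", "3.9", "3.10", "3.11"])
    def py(session):
        """Install the sample requirements and run the sample tests."""
        session.install("-r", "requirements.txt")
        session.install("-r", "requirements-test.txt")
        session.run("pytest", *session.posargs)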
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-rapidmigrationassessment
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://cla.developers.google.com/about/google-individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://cla.developers.google.com/about/google-corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-rapidmigrationassessment/LICENSE b/packages/google-cloud-rapidmigrationassessment/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-cloud-rapidmigrationassessment/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-rapidmigrationassessment/MANIFEST.in b/packages/google-cloud-rapidmigrationassessment/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-rapidmigrationassessment/README.rst b/packages/google-cloud-rapidmigrationassessment/README.rst new file mode 100644 index 000000000000..89bd0de566ea --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/README.rst @@ -0,0 +1,107 @@ +Python Client for Rapid Migration Assessment API +================================================ + +|preview| |pypi| |versions| + +`Rapid Migration Assessment API`_: The Rapid Migration Assessment service is our first-party migration assessment and planning tool. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-rapidmigrationassessment.svg + :target: https://pypi.org/project/google-cloud-rapidmigrationassessment/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-rapidmigrationassessment.svg + :target: https://pypi.org/project/google-cloud-rapidmigrationassessment/ +.. _Rapid Migration Assessment API: https://cloud.google.com/migration-center/docs +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/rapidmigrationassessment/latest +.. _Product Documentation: https://cloud.google.com/migration-center/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Rapid Migration Assessment API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Rapid Migration Assessment API.: https://cloud.google.com/migration-center/docs +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. 
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-rapidmigrationassessment
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-rapidmigrationassessment
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Rapid Migration Assessment API
+  to see other available methods on the client.
+- Read the `Rapid Migration Assessment API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Rapid Migration Assessment API Product documentation: https://cloud.google.com/migration-center/docs
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-rapidmigrationassessment/SECURITY.md b/packages/google-cloud-rapidmigrationassessment/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-rapidmigrationassessment/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
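[Editor's illustration, not part of the generated diff: to make the README's pointers concrete, here is a minimal usage sketch for the new client. The ``parent`` resource format and the project and location values are assumptions following the usual GAPIC pattern, not something this diff specifies; it also assumes Application Default Credentials.]::

    # Minimal usage sketch (assumes ADC and the conventional
    # "projects/{project}/locations/{location}" parent format).
    from google.cloud import rapidmigrationassessment_v1

    client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()

    request = rapidmigrationassessment_v1.ListCollectorsRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical values
    )

    # list_collectors returns a pager that fetches further pages on demand.
    for collector in client.list_collectors(request=request):
        print(collector.name)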
diff --git a/packages/google-cloud-rapidmigrationassessment/docs/CHANGELOG.md b/packages/google-cloud-rapidmigrationassessment/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-rapidmigrationassessment/docs/README.rst b/packages/google-cloud-rapidmigrationassessment/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-rapidmigrationassessment/docs/_static/custom.css b/packages/google-cloud-rapidmigrationassessment/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-rapidmigrationassessment/docs/_templates/layout.html b/packages/google-cloud-rapidmigrationassessment/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+          <div id="python2-eol">
+            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+            Library versions released prior to that date will continue to be available. For more information please
+            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          </div>
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-rapidmigrationassessment/docs/conf.py b/packages/google-cloud-rapidmigrationassessment/docs/conf.py new file mode 100644 index 000000000000..510097542e05 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-rapidmigrationassessment documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-rapidmigrationassessment" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-rapidmigrationassessment", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-rapidmigrationassessment-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-rapidmigrationassessment.tex", + "google-cloud-rapidmigrationassessment Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-rapidmigrationassessment", + "google-cloud-rapidmigrationassessment Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-rapidmigrationassessment", + "google-cloud-rapidmigrationassessment Documentation", + author, + "google-cloud-rapidmigrationassessment", + "google-cloud-rapidmigrationassessment Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-rapidmigrationassessment/docs/index.rst b/packages/google-cloud-rapidmigrationassessment/docs/index.rst new file mode 100644 index 000000000000..89c2cdd32460 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + rapidmigrationassessment_v1/services + rapidmigrationassessment_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-rapidmigrationassessment`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-rapidmigrationassessment/docs/multiprocessing.rst b/packages/google-cloud-rapidmigrationassessment/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/rapid_migration_assessment.rst b/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/rapid_migration_assessment.rst new file mode 100644 index 000000000000..78cf76363fda --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/rapid_migration_assessment.rst @@ -0,0 +1,10 @@ +RapidMigrationAssessment +------------------------------------------ + +.. automodule:: google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment + :members: + :inherited-members: + +.. automodule:: google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/services.rst b/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/services.rst new file mode 100644 index 000000000000..574dfc0ba50e --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Rapidmigrationassessment v1 API +========================================================= +.. toctree:: + :maxdepth: 2 + + rapid_migration_assessment diff --git a/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/types.rst b/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/types.rst new file mode 100644 index 000000000000..115fea01b7c3 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/docs/rapidmigrationassessment_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Rapidmigrationassessment v1 API +====================================================== + +.. automodule:: google.cloud.rapidmigrationassessment_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/__init__.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/__init__.py new file mode 100644 index 000000000000..31c1c7aee67c --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/__init__.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.rapidmigrationassessment import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.async_client import ( + RapidMigrationAssessmentAsyncClient, +) +from google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.client import ( + RapidMigrationAssessmentClient, +) +from google.cloud.rapidmigrationassessment_v1.types.api_entities import ( + Annotation, + Collector, + GuestOsScan, + VSphereScan, +) +from google.cloud.rapidmigrationassessment_v1.types.rapidmigrationassessment import ( + CreateAnnotationRequest, + CreateCollectorRequest, + DeleteCollectorRequest, + GetAnnotationRequest, + GetCollectorRequest, + ListCollectorsRequest, + ListCollectorsResponse, + OperationMetadata, + PauseCollectorRequest, + RegisterCollectorRequest, + ResumeCollectorRequest, + UpdateCollectorRequest, +) + +__all__ = ( + "RapidMigrationAssessmentClient", + "RapidMigrationAssessmentAsyncClient", + "Annotation", + "Collector", + "GuestOsScan", + "VSphereScan", + "CreateAnnotationRequest", + "CreateCollectorRequest", + "DeleteCollectorRequest", + "GetAnnotationRequest", + "GetCollectorRequest", + "ListCollectorsRequest", + "ListCollectorsResponse", + "OperationMetadata", + "PauseCollectorRequest", + "RegisterCollectorRequest", + "ResumeCollectorRequest", + "UpdateCollectorRequest", +) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/gapic_version.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/py.typed b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/py.typed new file mode 100644 index 000000000000..0c2687a3d305 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-rapidmigrationassessment package uses inline types. 
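[Editor's illustration, not part of the generated diff: as the ``__init__.py`` above shows, the unversioned ``google.cloud.rapidmigrationassessment`` package simply re-exports the ``v1`` surface. A small sketch of what that means in practice, assuming the package is installed:]::

    # Both import paths resolve to the same generated classes, because the
    # unversioned package re-exports the v1 clients and types.
    from google.cloud import rapidmigrationassessment
    from google.cloud import rapidmigrationassessment_v1

    assert (
        rapidmigrationassessment.RapidMigrationAssessmentClient
        is rapidmigrationassessment_v1.RapidMigrationAssessmentClient
    )

    # __version__ comes from gapic_version.py; release-please stamps it.
    print(rapidmigrationassessment.__version__)  # "0.1.0"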
diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/__init__.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/__init__.py new file mode 100644 index 000000000000..61f7151bf475 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.rapidmigrationassessment_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.rapid_migration_assessment import ( + RapidMigrationAssessmentAsyncClient, + RapidMigrationAssessmentClient, +) +from .types.api_entities import Annotation, Collector, GuestOsScan, VSphereScan +from .types.rapidmigrationassessment import ( + CreateAnnotationRequest, + CreateCollectorRequest, + DeleteCollectorRequest, + GetAnnotationRequest, + GetCollectorRequest, + ListCollectorsRequest, + ListCollectorsResponse, + OperationMetadata, + PauseCollectorRequest, + RegisterCollectorRequest, + ResumeCollectorRequest, + UpdateCollectorRequest, +) + +__all__ = ( + "RapidMigrationAssessmentAsyncClient", + "Annotation", + "Collector", + "CreateAnnotationRequest", + "CreateCollectorRequest", + "DeleteCollectorRequest", + "GetAnnotationRequest", + "GetCollectorRequest", + "GuestOsScan", + "ListCollectorsRequest", + "ListCollectorsResponse", + "OperationMetadata", + "PauseCollectorRequest", + "RapidMigrationAssessmentClient", + "RegisterCollectorRequest", + "ResumeCollectorRequest", + "UpdateCollectorRequest", + "VSphereScan", +) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_metadata.json b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_metadata.json new file mode 100644 index 000000000000..0baeaf73ab8b --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_metadata.json @@ -0,0 +1,178 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.rapidmigrationassessment_v1", + "protoPackage": "google.cloud.rapidmigrationassessment.v1", + "schema": "1.0", + "services": { + "RapidMigrationAssessment": { + "clients": { + "grpc": { + "libraryClient": "RapidMigrationAssessmentClient", + "rpcs": { + "CreateAnnotation": { + "methods": [ + "create_annotation" + ] + }, + "CreateCollector": { + "methods": [ + "create_collector" + ] + }, + "DeleteCollector": { + "methods": [ + "delete_collector" + ] + }, + "GetAnnotation": { + "methods": [ + "get_annotation" + ] + }, + "GetCollector": { + "methods": [ + "get_collector" + ] + }, + "ListCollectors": { + "methods": [ + "list_collectors" + ] + }, + "PauseCollector": { + "methods": [ + "pause_collector" + ] + }, + "RegisterCollector": { + "methods": [ + 
"register_collector" + ] + }, + "ResumeCollector": { + "methods": [ + "resume_collector" + ] + }, + "UpdateCollector": { + "methods": [ + "update_collector" + ] + } + } + }, + "grpc-async": { + "libraryClient": "RapidMigrationAssessmentAsyncClient", + "rpcs": { + "CreateAnnotation": { + "methods": [ + "create_annotation" + ] + }, + "CreateCollector": { + "methods": [ + "create_collector" + ] + }, + "DeleteCollector": { + "methods": [ + "delete_collector" + ] + }, + "GetAnnotation": { + "methods": [ + "get_annotation" + ] + }, + "GetCollector": { + "methods": [ + "get_collector" + ] + }, + "ListCollectors": { + "methods": [ + "list_collectors" + ] + }, + "PauseCollector": { + "methods": [ + "pause_collector" + ] + }, + "RegisterCollector": { + "methods": [ + "register_collector" + ] + }, + "ResumeCollector": { + "methods": [ + "resume_collector" + ] + }, + "UpdateCollector": { + "methods": [ + "update_collector" + ] + } + } + }, + "rest": { + "libraryClient": "RapidMigrationAssessmentClient", + "rpcs": { + "CreateAnnotation": { + "methods": [ + "create_annotation" + ] + }, + "CreateCollector": { + "methods": [ + "create_collector" + ] + }, + "DeleteCollector": { + "methods": [ + "delete_collector" + ] + }, + "GetAnnotation": { + "methods": [ + "get_annotation" + ] + }, + "GetCollector": { + "methods": [ + "get_collector" + ] + }, + "ListCollectors": { + "methods": [ + "list_collectors" + ] + }, + "PauseCollector": { + "methods": [ + "pause_collector" + ] + }, + "RegisterCollector": { + "methods": [ + "register_collector" + ] + }, + "ResumeCollector": { + "methods": [ + "resume_collector" + ] + }, + "UpdateCollector": { + "methods": [ + "update_collector" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_version.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/py.typed b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/py.typed new file mode 100644 index 000000000000..0c2687a3d305 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-rapidmigrationassessment package uses inline types. 
diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/__init__.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/__init__.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/__init__.py new file mode 100644 index 000000000000..31fb97304334 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import RapidMigrationAssessmentAsyncClient +from .client import RapidMigrationAssessmentClient + +__all__ = ( + "RapidMigrationAssessmentClient", + "RapidMigrationAssessmentAsyncClient", +) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/async_client.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/async_client.py new file mode 100644 index 000000000000..e78e6632de5e --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/async_client.py @@ -0,0 +1,1784 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.rapidmigrationassessment_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment import ( + pagers, +) +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + +from .client import RapidMigrationAssessmentClient +from .transports.base import DEFAULT_CLIENT_INFO, RapidMigrationAssessmentTransport +from .transports.grpc_asyncio import RapidMigrationAssessmentGrpcAsyncIOTransport + + +class RapidMigrationAssessmentAsyncClient: + """Rapid Migration Assessment service""" + + _client: RapidMigrationAssessmentClient + + DEFAULT_ENDPOINT = RapidMigrationAssessmentClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RapidMigrationAssessmentClient.DEFAULT_MTLS_ENDPOINT + + annotation_path = staticmethod(RapidMigrationAssessmentClient.annotation_path) + parse_annotation_path = staticmethod( + RapidMigrationAssessmentClient.parse_annotation_path + ) + collector_path = staticmethod(RapidMigrationAssessmentClient.collector_path) + parse_collector_path = staticmethod( + RapidMigrationAssessmentClient.parse_collector_path + ) + common_billing_account_path = staticmethod( + RapidMigrationAssessmentClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + RapidMigrationAssessmentClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(RapidMigrationAssessmentClient.common_folder_path) + parse_common_folder_path = staticmethod( + RapidMigrationAssessmentClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + RapidMigrationAssessmentClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + RapidMigrationAssessmentClient.parse_common_organization_path + ) + common_project_path = staticmethod( + RapidMigrationAssessmentClient.common_project_path + ) + parse_common_project_path = staticmethod( + RapidMigrationAssessmentClient.parse_common_project_path + ) + common_location_path = staticmethod( + RapidMigrationAssessmentClient.common_location_path + ) + parse_common_location_path = staticmethod( + RapidMigrationAssessmentClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ RapidMigrationAssessmentAsyncClient: The constructed client.
+ """
+ return RapidMigrationAssessmentClient.from_service_account_info.__func__(RapidMigrationAssessmentAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ RapidMigrationAssessmentAsyncClient: The constructed client.
+ """
+ return RapidMigrationAssessmentClient.from_service_account_file.__func__(RapidMigrationAssessmentAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors occur.
+ """
+ return RapidMigrationAssessmentClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+ @property
+ def transport(self) -> RapidMigrationAssessmentTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ RapidMigrationAssessmentTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(RapidMigrationAssessmentClient).get_transport_class,
+ type(RapidMigrationAssessmentClient),
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, RapidMigrationAssessmentTransport] = "grpc_asyncio",
+ client_options: Optional[ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the rapid migration assessment client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.RapidMigrationAssessmentTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client = RapidMigrationAssessmentClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def create_collector(
+ self,
+ request: Optional[
+ Union[rapidmigrationassessment.CreateCollectorRequest, dict]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ collector: Optional[api_entities.Collector] = None,
+ collector_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates a Collector to manage the on-prem appliance
+ which collects information about customer assets.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import rapidmigrationassessment_v1
+
+ async def sample_create_collector():
+ # Create a client
+ client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient()
+
+ # Initialize request argument(s)
+ request = rapidmigrationassessment_v1.CreateCollectorRequest(
+ parent="parent_value",
+ collector_id="collector_id_value",
+ )
+
+ # Make the request
+ operation = client.create_collector(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.CreateCollectorRequest, dict]]):
+ The request object. Message for creating a Collector.
+ parent (:class:`str`):
+ Required. Name of the parent
+ (project+location).
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ collector (:class:`google.cloud.rapidmigrationassessment_v1.types.Collector`): + Required. The resource being created. + This corresponds to the ``collector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + collector_id (:class:`str`): + Required. Id of the requesting + object. + + This corresponds to the ``collector_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, collector, collector_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.CreateCollectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if collector is not None: + request.collector = collector + if collector_id is not None: + request.collector_id = collector_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_collector, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_annotation( + self, + request: Optional[ + Union[rapidmigrationassessment.CreateAnnotationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + annotation: Optional[api_entities.Annotation] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an Annotation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import rapidmigrationassessment_v1
+
+ async def sample_create_annotation():
+ # Create a client
+ client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient()
+
+ # Initialize request argument(s)
+ request = rapidmigrationassessment_v1.CreateAnnotationRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ operation = client.create_annotation(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.CreateAnnotationRequest, dict]]):
+ The request object. Message for creating an Annotation.
+ parent (:class:`str`):
+ Required. Name of the parent
+ (project+location).
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ annotation (:class:`google.cloud.rapidmigrationassessment_v1.types.Annotation`):
+ Required. The resource being created.
+ This corresponds to the ``annotation`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.rapidmigrationassessment_v1.types.Annotation`
+ Message describing an Annotation
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, annotation])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = rapidmigrationassessment.CreateAnnotationRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if annotation is not None:
+ request.annotation = annotation
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_annotation,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ api_entities.Annotation,
+ metadata_type=rapidmigrationassessment.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response + + async def get_annotation( + self, + request: Optional[ + Union[rapidmigrationassessment.GetAnnotationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> api_entities.Annotation: + r"""Gets details of a single Annotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_get_annotation(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetAnnotationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_annotation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.GetAnnotationRequest, dict]]): + The request object. Message for getting a specific + Annotation + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.rapidmigrationassessment_v1.types.Annotation: + Message describing an Annotation + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.GetAnnotationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_annotation, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_collectors( + self, + request: Optional[ + Union[rapidmigrationassessment.ListCollectorsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCollectorsAsyncPager: + r"""Lists Collectors in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_list_collectors(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.ListCollectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collectors(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.ListCollectorsRequest, dict]]): + The request object. Message for requesting list of + Collectors. + parent (:class:`str`): + Required. Parent value for + ListCollectorsRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.pagers.ListCollectorsAsyncPager: + Message for response to listing + Collectors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.ListCollectorsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_collectors, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCollectorsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.GetCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> api_entities.Collector: + r"""Gets details of a single Collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_get_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetCollectorRequest( + name="name_value", + ) + + # Make the request + response = await client.get_collector(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.GetCollectorRequest, dict]]): + The request object. Message for getting a specific + Collector. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.rapidmigrationassessment_v1.types.Collector: + Message describing Collector object. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.GetCollectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_collector, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.UpdateCollectorRequest, dict] + ] = None, + *, + collector: Optional[api_entities.Collector] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single Collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_update_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.UpdateCollectorRequest( + ) + + # Make the request + operation = client.update_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.UpdateCollectorRequest, dict]]): + The request object. Message for updating a Collector. + collector (:class:`google.cloud.rapidmigrationassessment_v1.types.Collector`): + Required. The resource being updated. + This corresponds to the ``collector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Collector resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([collector, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.UpdateCollectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if collector is not None: + request.collector = collector + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_collector, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("collector.name", request.collector.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.DeleteCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Collector - changes state of + collector to "Deleting". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_delete_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.DeleteCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.DeleteCollectorRequest, dict]]): + The request object. Message for deleting a Collector. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.DeleteCollectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_collector, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + async def resume_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.ResumeCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Resumes the given collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_resume_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.ResumeCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.resume_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.ResumeCollectorRequest, dict]]): + The request object. Message for resuming a Collector. + name (:class:`str`): + Required. 
Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.ResumeCollectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_collector, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + async def register_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.RegisterCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Registers the given collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_register_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.RegisterCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.register_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.RegisterCollectorRequest, dict]]): + The request object. Message for registering a Collector. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.RegisterCollectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.register_collector, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + async def pause_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.PauseCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Pauses the given collector. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + async def sample_pause_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.PauseCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.pause_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.rapidmigrationassessment_v1.types.PauseCollectorRequest, dict]]): + The request object. Message for pausing a Collector. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = rapidmigrationassessment.PauseCollectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_collector, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
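+ # `response` below is the raw operations_pb2.Operation proto; as an
+ # illustration (placeholder operation name), callers typically check
+ # its `done` flag and then its `error` or `response` field:
+ #
+ # op = await client.get_operation({"name": "projects/p/locations/l/operations/o"})
+ # if op.done and op.HasField("error"):
+ #     print(op.error.message)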
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
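+ # The routing header assembled above is transmitted as the
+ # `x-goog-request-params` gRPC metadata key, carrying a value such as
+ # `name=projects/p/locations/l/operations/o` (illustrative).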
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
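+ # `response` below is a plain locations_pb2.ListLocationsResponse; a
+ # short sketch of consuming it:
+ #
+ # for loc in response.locations:
+ #     print(loc.location_id)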
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RapidMigrationAssessmentAsyncClient",) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/client.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/client.py new file mode 100644 index 000000000000..4885757bfa29 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/client.py @@ -0,0 +1,2003 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.rapidmigrationassessment_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment import ( + pagers, +) +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + +from .transports.base import DEFAULT_CLIENT_INFO, RapidMigrationAssessmentTransport +from .transports.grpc import RapidMigrationAssessmentGrpcTransport +from .transports.grpc_asyncio import RapidMigrationAssessmentGrpcAsyncIOTransport +from .transports.rest import RapidMigrationAssessmentRestTransport + + +class RapidMigrationAssessmentClientMeta(type): + """Metaclass for the RapidMigrationAssessment client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[RapidMigrationAssessmentTransport]]
+ _transport_registry["grpc"] = RapidMigrationAssessmentGrpcTransport
+ _transport_registry["grpc_asyncio"] = RapidMigrationAssessmentGrpcAsyncIOTransport
+ _transport_registry["rest"] = RapidMigrationAssessmentRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[RapidMigrationAssessmentTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class RapidMigrationAssessmentClient(metaclass=RapidMigrationAssessmentClientMeta):
+ """Rapid Migration Assessment service"""
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "rapidmigrationassessment.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ RapidMigrationAssessmentClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ RapidMigrationAssessmentClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> RapidMigrationAssessmentTransport:
+ """Returns the transport used by the client instance.
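+
+ For example (a sketch; the concrete type depends on the constructor
+ arguments):
+
+ .. code-block:: python
+
+ client = RapidMigrationAssessmentClient(transport="rest")
+ assert isinstance(client.transport, RapidMigrationAssessmentRestTransport)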
+
+ Returns:
+ RapidMigrationAssessmentTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def annotation_path(
+ project: str,
+ location: str,
+ annotation: str,
+ ) -> str:
+ """Returns a fully-qualified annotation string."""
+ return (
+ "projects/{project}/locations/{location}/annotations/{annotation}".format(
+ project=project,
+ location=location,
+ annotation=annotation,
+ )
+ )
+
+ @staticmethod
+ def parse_annotation_path(path: str) -> Dict[str, str]:
+ """Parses an annotation path into its component segments."""
+ m = re.match(
+ r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/annotations/(?P<annotation>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def collector_path(
+ project: str,
+ location: str,
+ collector: str,
+ ) -> str:
+ """Returns a fully-qualified collector string."""
+ return "projects/{project}/locations/{location}/collectors/{collector}".format(
+ project=project,
+ location=location,
+ collector=collector,
+ )
+
+ @staticmethod
+ def parse_collector_path(path: str) -> Dict[str, str]:
+ """Parses a collector path into its component segments."""
+ m = re.match(
+ r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/collectors/(?P<collector>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls,
client_options: Optional[client_options_lib.ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Figure out the client cert source to use.
+ client_cert_source = None
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ client_cert_source = client_options.client_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = cls.DEFAULT_ENDPOINT
+
+ return api_endpoint, client_cert_source
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Optional[Union[str, RapidMigrationAssessmentTransport]] = None,
+ client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the rapid migration assessment client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, RapidMigrationAssessmentTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RapidMigrationAssessmentTransport): + # transport is a RapidMigrationAssessmentTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.CreateCollectorRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + collector: Optional[api_entities.Collector] = None, + collector_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a Collector to manage the on-prem appliance + which collects information about Customer assets. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_create_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.CreateCollectorRequest( + parent="parent_value", + collector_id="collector_id_value", + ) + + # Make the request + operation = client.create_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.CreateCollectorRequest, dict]): + The request object. Message for creating a Collector. + parent (str): + Required. Name of the parent + (project+location). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + collector (google.cloud.rapidmigrationassessment_v1.types.Collector): + Required. The resource being created. + This corresponds to the ``collector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + collector_id (str): + Required. Id of the requesting + object. + + This corresponds to the ``collector_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. 
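+
+ As an illustration (the 300-second timeout is an arbitrary choice),
+ the returned future can be waited on with an upper bound:
+
+ .. code-block:: python
+
+ operation = client.create_collector(request=request)
+ collector = operation.result(timeout=300)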
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, collector, collector_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a rapidmigrationassessment.CreateCollectorRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, rapidmigrationassessment.CreateCollectorRequest):
+ request = rapidmigrationassessment.CreateCollectorRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if collector is not None:
+ request.collector = collector
+ if collector_id is not None:
+ request.collector_id = collector_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_collector]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ api_entities.Collector,
+ metadata_type=rapidmigrationassessment.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_annotation(
+ self,
+ request: Optional[
+ Union[rapidmigrationassessment.CreateAnnotationRequest, dict]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ annotation: Optional[api_entities.Annotation] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates an Annotation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import rapidmigrationassessment_v1
+
+ def sample_create_annotation():
+ # Create a client
+ client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()
+
+ # Initialize request argument(s)
+ request = rapidmigrationassessment_v1.CreateAnnotationRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ operation = client.create_annotation(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.rapidmigrationassessment_v1.types.CreateAnnotationRequest, dict]):
+ The request object. Message for creating an Annotation.
+ parent (str):
+ Required. Name of the parent
+ (project+location).
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + annotation (google.cloud.rapidmigrationassessment_v1.types.Annotation): + Required. The resource being created. + This corresponds to the ``annotation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Annotation` + Message describing an Annotation + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, annotation]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.CreateAnnotationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.CreateAnnotationRequest): + request = rapidmigrationassessment.CreateAnnotationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if annotation is not None: + request.annotation = annotation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_annotation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + api_entities.Annotation, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_annotation( + self, + request: Optional[ + Union[rapidmigrationassessment.GetAnnotationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> api_entities.Annotation: + r"""Gets details of a single Annotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_get_annotation(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetAnnotationRequest( + name="name_value", + ) + + # Make the request + response = client.get_annotation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.GetAnnotationRequest, dict]): + The request object. Message for getting a specific + Annotation + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.rapidmigrationassessment_v1.types.Annotation: + Message describing an Annotation + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.GetAnnotationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.GetAnnotationRequest): + request = rapidmigrationassessment.GetAnnotationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_annotation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_collectors( + self, + request: Optional[ + Union[rapidmigrationassessment.ListCollectorsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCollectorsPager: + r"""Lists Collectors in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_list_collectors(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.ListCollectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collectors(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.ListCollectorsRequest, dict]): + The request object. Message for requesting list of + Collectors. + parent (str): + Required. Parent value for + ListCollectorsRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.pagers.ListCollectorsPager: + Message for response to listing + Collectors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.ListCollectorsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.ListCollectorsRequest): + request = rapidmigrationassessment.ListCollectorsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_collectors] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCollectorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
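+ # A sketch of consuming the pager (the parent value is a placeholder):
+ #
+ # for collector in client.list_collectors(parent="projects/p/locations/l"):
+ #     print(collector.name)
+ #
+ # Whole pages are also reachable through the pager's `pages` property.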
+ return response + + def get_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.GetCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> api_entities.Collector: + r"""Gets details of a single Collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_get_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetCollectorRequest( + name="name_value", + ) + + # Make the request + response = client.get_collector(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.GetCollectorRequest, dict]): + The request object. Message for getting a specific + Collector. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.rapidmigrationassessment_v1.types.Collector: + Message describing Collector object. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.GetCollectorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.GetCollectorRequest): + request = rapidmigrationassessment.GetCollectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_collector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
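+ # `response` is a proto-plus api_entities.Collector; its fields are
+ # plain attributes, e.g. `response.name` (illustrative usage).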
+ return response + + def update_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.UpdateCollectorRequest, dict] + ] = None, + *, + collector: Optional[api_entities.Collector] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_update_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.UpdateCollectorRequest( + ) + + # Make the request + operation = client.update_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.UpdateCollectorRequest, dict]): + The request object. Message for updating a Collector. + collector (google.cloud.rapidmigrationassessment_v1.types.Collector): + Required. The resource being updated. + This corresponds to the ``collector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Collector resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([collector, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.UpdateCollectorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
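+ # Both calling conventions converge here; these two calls are
+ # equivalent (illustrative values):
+ #
+ # client.update_collector(collector=my_collector, update_mask=my_mask)
+ # client.update_collector(
+ #     request={"collector": my_collector, "update_mask": my_mask}
+ # )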
+ if not isinstance(request, rapidmigrationassessment.UpdateCollectorRequest): + request = rapidmigrationassessment.UpdateCollectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if collector is not None: + request.collector = collector + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_collector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("collector.name", request.collector.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.DeleteCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Collector - changes state of + collector to "Deleting". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_delete_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.DeleteCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.DeleteCollectorRequest, dict]): + The request object. Message for deleting a Collector. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
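+ # For example, a mixed call like the following would be rejected
+ # below (illustrative values):
+ #
+ # client.delete_collector(request=request, name="name_value")  # -> ValueError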
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.DeleteCollectorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.DeleteCollectorRequest): + request = rapidmigrationassessment.DeleteCollectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_collector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + def resume_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.ResumeCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Resumes the given collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_resume_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.ResumeCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.resume_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.ResumeCollectorRequest, dict]): + The request object. Message for resuming a Collector. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.ResumeCollectorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.ResumeCollectorRequest): + request = rapidmigrationassessment.ResumeCollectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_collector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + def register_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.RegisterCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Registers the given collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_register_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.RegisterCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.register_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.RegisterCollectorRequest, dict]): + The request object. Message for registering a Collector. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.RegisterCollectorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.RegisterCollectorRequest): + request = rapidmigrationassessment.RegisterCollectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.register_collector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + def pause_collector( + self, + request: Optional[ + Union[rapidmigrationassessment.PauseCollectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Pauses the given collector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import rapidmigrationassessment_v1 + + def sample_pause_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.PauseCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.pause_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.rapidmigrationassessment_v1.types.PauseCollectorRequest, dict]): + The request object. Message for pausing a Collector. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.rapidmigrationassessment_v1.types.Collector` + Message describing Collector object. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a rapidmigrationassessment.PauseCollectorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, rapidmigrationassessment.PauseCollectorRequest): + request = rapidmigrationassessment.PauseCollectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_collector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + api_entities.Collector, + metadata_type=rapidmigrationassessment.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RapidMigrationAssessmentClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
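+        # Cancellation is best-effort: the RPC returns ``google.protobuf.Empty``
+        # on success, so there is nothing to return here, and the operation
+        # may still complete before the server processes the cancellation.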
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def get_location(
+        self,
+        request: Optional[locations_pb2.GetLocationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.Location:
+        r"""Gets information about a location.
+
+        Args:
+            request (:class:`~.locations_pb2.GetLocationRequest`):
+                The request object. Request message for
+                `GetLocation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.Location:
+                Location object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.GetLocationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.get_location,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_locations(
+        self,
+        request: Optional[locations_pb2.ListLocationsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> locations_pb2.ListLocationsResponse:
+        r"""Lists information about the supported locations for this service.
+
+        Args:
+            request (:class:`~.locations_pb2.ListLocationsRequest`):
+                The request object. Request message for
+                `ListLocations` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.locations_pb2.ListLocationsResponse:
+                Response message for ``ListLocations`` method.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.ListLocationsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.list_locations,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
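+        # Note: this returns the raw ``locations_pb2.ListLocationsResponse``
+        # rather than a pager, so callers page manually by re-issuing the
+        # request with ``next_page_token`` from the previous response.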
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RapidMigrationAssessmentClient",) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/pagers.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/pagers.py new file mode 100644 index 000000000000..4f424c5ef21f --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/pagers.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + + +class ListCollectorsPager: + """A pager for iterating through ``list_collectors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.rapidmigrationassessment_v1.types.ListCollectorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``collectors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCollectors`` requests and continue to iterate + through the ``collectors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.rapidmigrationassessment_v1.types.ListCollectorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., rapidmigrationassessment.ListCollectorsResponse], + request: rapidmigrationassessment.ListCollectorsRequest, + response: rapidmigrationassessment.ListCollectorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.rapidmigrationassessment_v1.types.ListCollectorsRequest): + The initial request object. + response (google.cloud.rapidmigrationassessment_v1.types.ListCollectorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = rapidmigrationassessment.ListCollectorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[rapidmigrationassessment.ListCollectorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[api_entities.Collector]: + for page in self.pages: + yield from page.collectors + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCollectorsAsyncPager: + """A pager for iterating through ``list_collectors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.rapidmigrationassessment_v1.types.ListCollectorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``collectors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCollectors`` requests and continue to iterate + through the ``collectors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.rapidmigrationassessment_v1.types.ListCollectorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[rapidmigrationassessment.ListCollectorsResponse] + ], + request: rapidmigrationassessment.ListCollectorsRequest, + response: rapidmigrationassessment.ListCollectorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.rapidmigrationassessment_v1.types.ListCollectorsRequest): + The initial request object. + response (google.cloud.rapidmigrationassessment_v1.types.ListCollectorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = rapidmigrationassessment.ListCollectorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[rapidmigrationassessment.ListCollectorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[api_entities.Collector]: + async def async_generator(): + async for page in self.pages: + for response in page.collectors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/__init__.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/__init__.py new file mode 100644 index 000000000000..efd12892e3fc --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RapidMigrationAssessmentTransport +from .grpc import RapidMigrationAssessmentGrpcTransport +from .grpc_asyncio import RapidMigrationAssessmentGrpcAsyncIOTransport +from .rest import ( + RapidMigrationAssessmentRestInterceptor, + RapidMigrationAssessmentRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[RapidMigrationAssessmentTransport]] +_transport_registry["grpc"] = RapidMigrationAssessmentGrpcTransport +_transport_registry["grpc_asyncio"] = RapidMigrationAssessmentGrpcAsyncIOTransport +_transport_registry["rest"] = RapidMigrationAssessmentRestTransport + +__all__ = ( + "RapidMigrationAssessmentTransport", + "RapidMigrationAssessmentGrpcTransport", + "RapidMigrationAssessmentGrpcAsyncIOTransport", + "RapidMigrationAssessmentRestTransport", + "RapidMigrationAssessmentRestInterceptor", +) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/base.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/base.py new file mode 100644 index 000000000000..627d1bd76779 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/base.py @@ -0,0 +1,378 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.rapidmigrationassessment_v1 import gapic_version as package_version +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class RapidMigrationAssessmentTransport(abc.ABC): + """Abstract transport class for RapidMigrationAssessment.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "rapidmigrationassessment.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Apply the GDCH audience only to application default credentials;
+            # credentials supplied by the user are left untouched.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
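+        # Each entry below pairs an RPC with its default call policy:
+        # read-only methods (get_annotation, list_collectors, get_collector)
+        # retry transient DeadlineExceeded/ServiceUnavailable errors with
+        # exponential backoff, while mutating methods use a plain 60-second
+        # timeout.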
+ self._wrapped_methods = { + self.create_collector: gapic_v1.method.wrap_method( + self.create_collector, + default_timeout=60.0, + client_info=client_info, + ), + self.create_annotation: gapic_v1.method.wrap_method( + self.create_annotation, + default_timeout=60.0, + client_info=client_info, + ), + self.get_annotation: gapic_v1.method.wrap_method( + self.get_annotation, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_collectors: gapic_v1.method.wrap_method( + self.list_collectors, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_collector: gapic_v1.method.wrap_method( + self.get_collector, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_collector: gapic_v1.method.wrap_method( + self.update_collector, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_collector: gapic_v1.method.wrap_method( + self.delete_collector, + default_timeout=60.0, + client_info=client_info, + ), + self.resume_collector: gapic_v1.method.wrap_method( + self.resume_collector, + default_timeout=60.0, + client_info=client_info, + ), + self.register_collector: gapic_v1.method.wrap_method( + self.register_collector, + default_timeout=60.0, + client_info=client_info, + ), + self.pause_collector: gapic_v1.method.wrap_method( + self.pause_collector, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateCollectorRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateAnnotationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.GetAnnotationRequest], + Union[api_entities.Annotation, Awaitable[api_entities.Annotation]], + ]: + raise NotImplementedError() + + @property + def list_collectors( + self, + ) -> Callable[ + [rapidmigrationassessment.ListCollectorsRequest], + Union[ + rapidmigrationassessment.ListCollectorsResponse, + Awaitable[rapidmigrationassessment.ListCollectorsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.GetCollectorRequest], + Union[api_entities.Collector, Awaitable[api_entities.Collector]], + ]: + raise NotImplementedError() + + @property + def update_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.UpdateCollectorRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.DeleteCollectorRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def resume_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.ResumeCollectorRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def register_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.RegisterCollectorRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def pause_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.PauseCollectorRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + 
Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("RapidMigrationAssessmentTransport",) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/grpc.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/grpc.py new file mode 100644 index 000000000000..8201c44fa503 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/grpc.py @@ -0,0 +1,648 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + +from .base import DEFAULT_CLIENT_INFO, RapidMigrationAssessmentTransport + + +class RapidMigrationAssessmentGrpcTransport(RapidMigrationAssessmentTransport): + """gRPC backend transport for RapidMigrationAssessment. + + Rapid Migration Assessment service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "rapidmigrationassessment.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
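+                # Within this deprecated mTLS path, an explicit
+                # client_cert_source takes precedence; otherwise application
+                # default SSL credentials are used.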
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials, and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "rapidmigrationassessment.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateCollectorRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create collector method over gRPC. + + Create a Collector to manage the on-prem appliance + which collects information about Customer assets. + + Returns: + Callable[[~.CreateCollectorRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_collector" not in self._stubs: + self._stubs["create_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/CreateCollector", + request_serializer=rapidmigrationassessment.CreateCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_collector"] + + @property + def create_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateAnnotationRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create annotation method over gRPC. + + Creates an Annotation + + Returns: + Callable[[~.CreateAnnotationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_annotation" not in self._stubs: + self._stubs["create_annotation"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/CreateAnnotation", + request_serializer=rapidmigrationassessment.CreateAnnotationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_annotation"] + + @property + def get_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.GetAnnotationRequest], api_entities.Annotation + ]: + r"""Return a callable for the get annotation method over gRPC. + + Gets details of a single Annotation. + + Returns: + Callable[[~.GetAnnotationRequest], + ~.Annotation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_annotation" not in self._stubs: + self._stubs["get_annotation"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/GetAnnotation", + request_serializer=rapidmigrationassessment.GetAnnotationRequest.serialize, + response_deserializer=api_entities.Annotation.deserialize, + ) + return self._stubs["get_annotation"] + + @property + def list_collectors( + self, + ) -> Callable[ + [rapidmigrationassessment.ListCollectorsRequest], + rapidmigrationassessment.ListCollectorsResponse, + ]: + r"""Return a callable for the list collectors method over gRPC. + + Lists Collectors in a given project and location. 
+ + Returns: + Callable[[~.ListCollectorsRequest], + ~.ListCollectorsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_collectors" not in self._stubs: + self._stubs["list_collectors"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/ListCollectors", + request_serializer=rapidmigrationassessment.ListCollectorsRequest.serialize, + response_deserializer=rapidmigrationassessment.ListCollectorsResponse.deserialize, + ) + return self._stubs["list_collectors"] + + @property + def get_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.GetCollectorRequest], api_entities.Collector + ]: + r"""Return a callable for the get collector method over gRPC. + + Gets details of a single Collector. + + Returns: + Callable[[~.GetCollectorRequest], + ~.Collector]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_collector" not in self._stubs: + self._stubs["get_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/GetCollector", + request_serializer=rapidmigrationassessment.GetCollectorRequest.serialize, + response_deserializer=api_entities.Collector.deserialize, + ) + return self._stubs["get_collector"] + + @property + def update_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.UpdateCollectorRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update collector method over gRPC. + + Updates the parameters of a single Collector. + + Returns: + Callable[[~.UpdateCollectorRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_collector" not in self._stubs: + self._stubs["update_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/UpdateCollector", + request_serializer=rapidmigrationassessment.UpdateCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_collector"] + + @property + def delete_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.DeleteCollectorRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete collector method over gRPC. + + Deletes a single Collector - changes state of + collector to "Deleting". + + Returns: + Callable[[~.DeleteCollectorRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
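+        # Stubs are created lazily on first property access and cached in
+        # self._stubs, so each RPC gets exactly one callable per channel.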
+ if "delete_collector" not in self._stubs: + self._stubs["delete_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/DeleteCollector", + request_serializer=rapidmigrationassessment.DeleteCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_collector"] + + @property + def resume_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.ResumeCollectorRequest], operations_pb2.Operation + ]: + r"""Return a callable for the resume collector method over gRPC. + + Resumes the given collector. + + Returns: + Callable[[~.ResumeCollectorRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_collector" not in self._stubs: + self._stubs["resume_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/ResumeCollector", + request_serializer=rapidmigrationassessment.ResumeCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["resume_collector"] + + @property + def register_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.RegisterCollectorRequest], operations_pb2.Operation + ]: + r"""Return a callable for the register collector method over gRPC. + + Registers the given collector. + + Returns: + Callable[[~.RegisterCollectorRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "register_collector" not in self._stubs: + self._stubs["register_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/RegisterCollector", + request_serializer=rapidmigrationassessment.RegisterCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_collector"] + + @property + def pause_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.PauseCollectorRequest], operations_pb2.Operation + ]: + r"""Return a callable for the pause collector method over gRPC. + + Pauses the given collector. + + Returns: + Callable[[~.PauseCollectorRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "pause_collector" not in self._stubs: + self._stubs["pause_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/PauseCollector", + request_serializer=rapidmigrationassessment.PauseCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["pause_collector"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("RapidMigrationAssessmentGrpcTransport",) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/grpc_asyncio.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/grpc_asyncio.py new file mode 100644 index 000000000000..88f282f5afe7 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/grpc_asyncio.py @@ -0,0 +1,658 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + +from .base import DEFAULT_CLIENT_INFO, RapidMigrationAssessmentTransport +from .grpc import RapidMigrationAssessmentGrpcTransport
+ + +class RapidMigrationAssessmentGrpcAsyncIOTransport(RapidMigrationAssessmentTransport): + """gRPC AsyncIO backend transport for RapidMigrationAssessment. + + Rapid Migration Assessment service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {}
+ + @classmethod + def create_channel( + cls, + host: str = "rapidmigrationassessment.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "rapidmigrationassessment.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateCollectorRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create collector method over gRPC. + + Create a Collector to manage the on-prem appliance + which collects information about Customer assets. + + Returns: + Callable[[~.CreateCollectorRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_collector" not in self._stubs: + self._stubs["create_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/CreateCollector", + request_serializer=rapidmigrationassessment.CreateCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_collector"] + + @property + def create_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateAnnotationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create annotation method over gRPC. + + Creates an Annotation + + Returns: + Callable[[~.CreateAnnotationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_annotation" not in self._stubs: + self._stubs["create_annotation"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/CreateAnnotation", + request_serializer=rapidmigrationassessment.CreateAnnotationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_annotation"] + + @property + def get_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.GetAnnotationRequest], + Awaitable[api_entities.Annotation], + ]: + r"""Return a callable for the get annotation method over gRPC. + + Gets details of a single Annotation. + + Returns: + Callable[[~.GetAnnotationRequest], + Awaitable[~.Annotation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_annotation" not in self._stubs: + self._stubs["get_annotation"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/GetAnnotation", + request_serializer=rapidmigrationassessment.GetAnnotationRequest.serialize, + response_deserializer=api_entities.Annotation.deserialize, + ) + return self._stubs["get_annotation"] + + @property + def list_collectors( + self, + ) -> Callable[ + [rapidmigrationassessment.ListCollectorsRequest], + Awaitable[rapidmigrationassessment.ListCollectorsResponse], + ]: + r"""Return a callable for the list collectors method over gRPC. + + Lists Collectors in a given project and location. + + Returns: + Callable[[~.ListCollectorsRequest], + Awaitable[~.ListCollectorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_collectors" not in self._stubs: + self._stubs["list_collectors"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/ListCollectors", + request_serializer=rapidmigrationassessment.ListCollectorsRequest.serialize, + response_deserializer=rapidmigrationassessment.ListCollectorsResponse.deserialize, + ) + return self._stubs["list_collectors"] + + @property + def get_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.GetCollectorRequest], + Awaitable[api_entities.Collector], + ]: + r"""Return a callable for the get collector method over gRPC. + + Gets details of a single Collector. + + Returns: + Callable[[~.GetCollectorRequest], + Awaitable[~.Collector]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_collector" not in self._stubs: + self._stubs["get_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/GetCollector", + request_serializer=rapidmigrationassessment.GetCollectorRequest.serialize, + response_deserializer=api_entities.Collector.deserialize, + ) + return self._stubs["get_collector"] + + @property + def update_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.UpdateCollectorRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update collector method over gRPC. + + Updates the parameters of a single Collector. + + Returns: + Callable[[~.UpdateCollectorRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_collector" not in self._stubs: + self._stubs["update_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/UpdateCollector", + request_serializer=rapidmigrationassessment.UpdateCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_collector"] + + @property + def delete_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.DeleteCollectorRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete collector method over gRPC. + + Deletes a single Collector - changes state of + collector to "Deleting". + + Returns: + Callable[[~.DeleteCollectorRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_collector" not in self._stubs: + self._stubs["delete_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/DeleteCollector", + request_serializer=rapidmigrationassessment.DeleteCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_collector"] + + @property + def resume_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.ResumeCollectorRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the resume collector method over gRPC. + + Resumes the given collector. + + Returns: + Callable[[~.ResumeCollectorRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_collector" not in self._stubs: + self._stubs["resume_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/ResumeCollector", + request_serializer=rapidmigrationassessment.ResumeCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["resume_collector"] + + @property + def register_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.RegisterCollectorRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the register collector method over gRPC. + + Registers the given collector. + + Returns: + Callable[[~.RegisterCollectorRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "register_collector" not in self._stubs: + self._stubs["register_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/RegisterCollector", + request_serializer=rapidmigrationassessment.RegisterCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_collector"] + + @property + def pause_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.PauseCollectorRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the pause collector method over gRPC. + + Pauses the given collector. + + Returns: + Callable[[~.PauseCollectorRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "pause_collector" not in self._stubs: + self._stubs["pause_collector"] = self.grpc_channel.unary_unary( + "/google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment/PauseCollector", + request_serializer=rapidmigrationassessment.PauseCollectorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["pause_collector"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("RapidMigrationAssessmentGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/rest.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/rest.py new file mode 100644 index 000000000000..a9dabc884931 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/services/rapid_migration_assessment/transports/rest.py @@ -0,0 +1,2146 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version
+ +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import RapidMigrationAssessmentTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +)
+ + +class RapidMigrationAssessmentRestInterceptor: + """Interceptor for RapidMigrationAssessment. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RapidMigrationAssessmentRestTransport. + + .. code-block:: python + class MyCustomRapidMigrationAssessmentInterceptor(RapidMigrationAssessmentRestInterceptor): + def pre_create_annotation(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_annotation(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_create_collector(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_collector(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_delete_collector(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_delete_collector(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_get_annotation(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_annotation(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_get_collector(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_collector(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_list_collectors(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_collectors(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_pause_collector(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_pause_collector(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_register_collector(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_register_collector(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_resume_collector(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_resume_collector(self, response): + logging.info(f"Received response: {response}") + return response
+ + def pre_update_collector(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_update_collector(self, response): + logging.info(f"Received response: {response}") + return response
+ + transport = RapidMigrationAssessmentRestTransport(interceptor=MyCustomRapidMigrationAssessmentInterceptor()) + client = RapidMigrationAssessmentClient(transport=transport) + + + """
+ + def pre_create_annotation( + self, + request: rapidmigrationassessment.CreateAnnotationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.CreateAnnotationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_annotation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata
+ + def post_create_annotation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_annotation + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code.
+ """ + return response + + def pre_create_collector( + self, + request: rapidmigrationassessment.CreateCollectorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.CreateCollectorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_collector + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_create_collector( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_collector + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_delete_collector( + self, + request: rapidmigrationassessment.DeleteCollectorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.DeleteCollectorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_collector + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_delete_collector( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_collector + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_get_annotation( + self, + request: rapidmigrationassessment.GetAnnotationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.GetAnnotationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_annotation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_get_annotation( + self, response: api_entities.Annotation + ) -> api_entities.Annotation: + """Post-rpc interceptor for get_annotation + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_get_collector( + self, + request: rapidmigrationassessment.GetCollectorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[rapidmigrationassessment.GetCollectorRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_collector + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_get_collector( + self, response: api_entities.Collector + ) -> api_entities.Collector: + """Post-rpc interceptor for get_collector + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_list_collectors( + self, + request: rapidmigrationassessment.ListCollectorsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.ListCollectorsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_collectors + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. 
+ """ + return request, metadata + + def post_list_collectors( + self, response: rapidmigrationassessment.ListCollectorsResponse + ) -> rapidmigrationassessment.ListCollectorsResponse: + """Post-rpc interceptor for list_collectors + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_pause_collector( + self, + request: rapidmigrationassessment.PauseCollectorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.PauseCollectorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for pause_collector + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_pause_collector( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for pause_collector + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_register_collector( + self, + request: rapidmigrationassessment.RegisterCollectorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.RegisterCollectorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for register_collector + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_register_collector( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for register_collector + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_resume_collector( + self, + request: rapidmigrationassessment.ResumeCollectorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.ResumeCollectorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for resume_collector + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_resume_collector( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for resume_collector + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_update_collector( + self, + request: rapidmigrationassessment.UpdateCollectorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + rapidmigrationassessment.UpdateCollectorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_collector + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_update_collector( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_collector + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the RapidMigrationAssessment server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the RapidMigrationAssessment server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RapidMigrationAssessmentRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RapidMigrationAssessmentRestInterceptor + + +class RapidMigrationAssessmentRestTransport(RapidMigrationAssessmentTransport): + """REST backend transport for RapidMigrationAssessment. + + Rapid Migration Assessment service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "rapidmigrationassessment.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RapidMigrationAssessmentRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self-signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+ + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RapidMigrationAssessmentRestInterceptor() + self._prep_wrapped_messages(client_info)
+ + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + }
+ + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client
+ + class _CreateAnnotation(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("CreateAnnotation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + }
+ + def __call__( + self, + request: rapidmigrationassessment.CreateAnnotationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create annotation method over HTTP. + + Args: + request (~.rapidmigrationassessment.CreateAnnotationRequest): + The request object. Message for creating an Annotation.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/annotations", + "body": "annotation", + }, + ] + request, metadata = self._interceptor.pre_create_annotation( + request, metadata + ) + pb_request = rapidmigrationassessment.CreateAnnotationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_annotation(resp) + return resp + + class _CreateCollector(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("CreateCollector") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "collectorId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.CreateCollectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create collector method over HTTP. + + Args: + request (~.rapidmigrationassessment.CreateCollectorRequest): + The request object. Message for creating a Collector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/collectors", + "body": "collector", + }, + ] + request, metadata = self._interceptor.pre_create_collector( + request, metadata + ) + pb_request = rapidmigrationassessment.CreateCollectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_collector(resp) + return resp + + class _DeleteCollector(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("DeleteCollector") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.DeleteCollectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete collector method over HTTP. + + Args: + request (~.rapidmigrationassessment.DeleteCollectorRequest): + The request object. Message for deleting a Collector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collectors/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_collector( + request, metadata + ) + pb_request = rapidmigrationassessment.DeleteCollectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_collector(resp) + return resp + + class _GetAnnotation(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("GetAnnotation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.GetAnnotationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> api_entities.Annotation: + r"""Call the get annotation method over HTTP. + + Args: + request (~.rapidmigrationassessment.GetAnnotationRequest): + The request object. Message for getting a specific + Annotation + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.api_entities.Annotation: + Message describing an Annotation + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/annotations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_annotation(request, metadata) + pb_request = rapidmigrationassessment.GetAnnotationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = api_entities.Annotation() + pb_resp = api_entities.Annotation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_annotation(resp) + return resp + + class _GetCollector(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("GetCollector") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.GetCollectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> api_entities.Collector: + r"""Call the get collector method over HTTP. + + Args: + request (~.rapidmigrationassessment.GetCollectorRequest): + The request object. Message for getting a specific + Collector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.api_entities.Collector: + Message describing Collector object. 
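+
+            Example (an illustrative sketch; the resource name is a
+            placeholder, and ``transport`` is assumed to be an instance of
+            this REST transport):
+
+            .. code-block:: python
+
+                request = rapidmigrationassessment.GetCollectorRequest(
+                    name="projects/my-project/locations/us-central1/collectors/my-collector",
+                )
+                collector = transport.get_collector(request)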
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collectors/*}", + }, + ] + request, metadata = self._interceptor.pre_get_collector(request, metadata) + pb_request = rapidmigrationassessment.GetCollectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = api_entities.Collector() + pb_resp = api_entities.Collector.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_collector(resp) + return resp + + class _ListCollectors(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("ListCollectors") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.ListCollectorsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rapidmigrationassessment.ListCollectorsResponse: + r"""Call the list collectors method over HTTP. + + Args: + request (~.rapidmigrationassessment.ListCollectorsRequest): + The request object. Message for requesting list of + Collectors. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.rapidmigrationassessment.ListCollectorsResponse: + Message for response to listing + Collectors. 
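+
+            Example (a paging sketch under the same placeholder assumptions
+            as the examples above):
+
+            .. code-block:: python
+
+                request = rapidmigrationassessment.ListCollectorsRequest(
+                    parent="projects/my-project/locations/us-central1",
+                    page_size=50,
+                )
+                while True:
+                    response = transport.list_collectors(request)
+                    for collector in response.collectors:
+                        print(collector.name)
+                    if not response.next_page_token:
+                        break
+                    request.page_token = response.next_page_token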
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/collectors", + }, + ] + request, metadata = self._interceptor.pre_list_collectors(request, metadata) + pb_request = rapidmigrationassessment.ListCollectorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rapidmigrationassessment.ListCollectorsResponse() + pb_resp = rapidmigrationassessment.ListCollectorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_collectors(resp) + return resp + + class _PauseCollector(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("PauseCollector") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.PauseCollectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the pause collector method over HTTP. + + Args: + request (~.rapidmigrationassessment.PauseCollectorRequest): + The request object. Message for pausing a Collector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
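+
+            Example (a polling sketch; the collector name is a placeholder
+            and ``transport`` is assumed to be an instance of this REST
+            transport):
+
+            .. code-block:: python
+
+                request = rapidmigrationassessment.PauseCollectorRequest(
+                    name="projects/my-project/locations/us-central1/collectors/my-collector",
+                )
+                operation = transport.pause_collector(request)
+                # Re-fetch the raw Operation until it reports done.
+                operation = transport.operations_client.get_operation(operation.name)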
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collectors/*}:pause", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pause_collector(request, metadata) + pb_request = rapidmigrationassessment.PauseCollectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pause_collector(resp) + return resp + + class _RegisterCollector(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("RegisterCollector") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.RegisterCollectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the register collector method over HTTP. + + Args: + request (~.rapidmigrationassessment.RegisterCollectorRequest): + The request object. Message for registering a Collector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collectors/*}:register", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_register_collector( + request, metadata + ) + pb_request = rapidmigrationassessment.RegisterCollectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_register_collector(resp) + return resp + + class _ResumeCollector(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("ResumeCollector") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.ResumeCollectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the resume collector method over HTTP. + + Args: + request (~.rapidmigrationassessment.ResumeCollectorRequest): + The request object. Message for resuming a Collector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collectors/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_collector( + request, metadata + ) + pb_request = rapidmigrationassessment.ResumeCollectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_collector(resp) + return resp + + class _UpdateCollector(RapidMigrationAssessmentRestStub): + def __hash__(self): + return hash("UpdateCollector") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: rapidmigrationassessment.UpdateCollectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update collector method over HTTP. + + Args: + request (~.rapidmigrationassessment.UpdateCollectorRequest): + The request object. Message for updating a Collector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
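+
+            Example (an update-mask sketch; assumes
+            ``from google.protobuf import field_mask_pb2`` and placeholder
+            resource names):
+
+            .. code-block:: python
+
+                request = rapidmigrationassessment.UpdateCollectorRequest(
+                    collector=api_entities.Collector(
+                        name="projects/my-project/locations/us-central1/collectors/my-collector",
+                        description="Refreshed description",
+                    ),
+                    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+                )
+                operation = transport.update_collector(request)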
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{collector.name=projects/*/locations/*/collectors/*}", + "body": "collector", + }, + ] + request, metadata = self._interceptor.pre_update_collector( + request, metadata + ) + pb_request = rapidmigrationassessment.UpdateCollectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_collector(resp) + return resp + + @property + def create_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateAnnotationRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAnnotation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.CreateCollectorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCollector(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.DeleteCollectorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCollector(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_annotation( + self, + ) -> Callable[ + [rapidmigrationassessment.GetAnnotationRequest], api_entities.Annotation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAnnotation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.GetCollectorRequest], api_entities.Collector + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetCollector(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_collectors( + self, + ) -> Callable[ + [rapidmigrationassessment.ListCollectorsRequest], + rapidmigrationassessment.ListCollectorsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCollectors(self._session, self._host, self._interceptor) # type: ignore + + @property + def pause_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.PauseCollectorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PauseCollector(self._session, self._host, self._interceptor) # type: ignore + + @property + def register_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.RegisterCollectorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RegisterCollector(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.ResumeCollectorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResumeCollector(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_collector( + self, + ) -> Callable[ + [rapidmigrationassessment.UpdateCollectorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCollector(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(RapidMigrationAssessmentRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(RapidMigrationAssessmentRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
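+            # (for example, an HTTP 404 is raised as
+            # google.api_core.exceptions.NotFound).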
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RapidMigrationAssessmentRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(RapidMigrationAssessmentRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(RapidMigrationAssessmentRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(RapidMigrationAssessmentRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RapidMigrationAssessmentRestTransport",) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/__init__.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/__init__.py new file mode 100644 index 000000000000..080be6533189 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .api_entities import Annotation, Collector, GuestOsScan, VSphereScan +from .rapidmigrationassessment import ( + CreateAnnotationRequest, + CreateCollectorRequest, + DeleteCollectorRequest, + GetAnnotationRequest, + GetCollectorRequest, + ListCollectorsRequest, + ListCollectorsResponse, + OperationMetadata, + PauseCollectorRequest, + RegisterCollectorRequest, + ResumeCollectorRequest, + UpdateCollectorRequest, +) + +__all__ = ( + "Annotation", + "Collector", + "GuestOsScan", + "VSphereScan", + "CreateAnnotationRequest", + "CreateCollectorRequest", + "DeleteCollectorRequest", + "GetAnnotationRequest", + "GetCollectorRequest", + "ListCollectorsRequest", + "ListCollectorsResponse", + "OperationMetadata", + "PauseCollectorRequest", + "RegisterCollectorRequest", + "ResumeCollectorRequest", + "UpdateCollectorRequest", +) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/api_entities.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/api_entities.py new file mode 100644 index 000000000000..f8112bf1149b --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/api_entities.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.rapidmigrationassessment.v1", + manifest={ + "GuestOsScan", + "VSphereScan", + "Collector", + "Annotation", + }, +) + + +class GuestOsScan(proto.Message): + r"""Message describing a MC Source of type Guest OS Scan. + + Attributes: + core_source (str): + reference to the corresponding Guest OS Scan + in MC Source. + """ + + core_source: str = proto.Field( + proto.STRING, + number=1, + ) + + +class VSphereScan(proto.Message): + r"""Message describing a MC Source of type VSphere Scan. + + Attributes: + core_source (str): + reference to the corresponding VSphere Scan + in MC Source. + """ + + core_source: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Collector(proto.Message): + r"""Message describing Collector object. + + Attributes: + name (str): + name of resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp. + labels (MutableMapping[str, str]): + Labels as key value pairs. + display_name (str): + User specified name of the Collector. + description (str): + User specified description of the Collector. + service_account (str): + Service Account email used to ingest data to + this Collector. + bucket (str): + Output only. 
Store cloud storage bucket name + (which is a guid) created with this Collector. + expected_asset_count (int): + User specified expected asset count. + state (google.cloud.rapidmigrationassessment_v1.types.Collector.State): + Output only. State of the Collector. + client_version (str): + Output only. Client version. + guest_os_scan (google.cloud.rapidmigrationassessment_v1.types.GuestOsScan): + Output only. Reference to MC Source Guest Os + Scan. + vsphere_scan (google.cloud.rapidmigrationassessment_v1.types.VSphereScan): + Output only. Reference to MC Source vsphere_scan. + collection_days (int): + How many days to collect data. + eula_uri (str): + Uri for EULA (End User License Agreement) + from customer. + """ + + class State(proto.Enum): + r"""-- Using suggestion from API Linter Analyzer for nesting enum -- -- + https://linter.aip.dev/216/nesting -- State of a Collector + (server_side). States are used for internal purposes and named to + keep convention of legacy product: + https://cloud.google.com/migrate/stratozone/docs/about-stratoprobe. + + Values: + STATE_UNSPECIFIED (0): + Collector state is not recognized. + STATE_INITIALIZING (1): + Collector started to create, but hasn't been + completed MC source creation and db object + creation. + STATE_READY_TO_USE (2): + Collector has been created, MC source + creation and db object creation completed. + STATE_REGISTERED (3): + Collector client has been registered with + client. + STATE_ACTIVE (4): + Collector client is actively scanning. + STATE_PAUSED (5): + Collector is not actively scanning. + STATE_DELETING (6): + Collector is starting background job for + deletion. + STATE_DECOMMISSIONED (7): + Collector completed all tasks for deletion. + STATE_ERROR (8): + Collector is in error state. + """ + STATE_UNSPECIFIED = 0 + STATE_INITIALIZING = 1 + STATE_READY_TO_USE = 2 + STATE_REGISTERED = 3 + STATE_ACTIVE = 4 + STATE_PAUSED = 5 + STATE_DELETING = 6 + STATE_DECOMMISSIONED = 7 + STATE_ERROR = 8 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + service_account: str = proto.Field( + proto.STRING, + number=7, + ) + bucket: str = proto.Field( + proto.STRING, + number=8, + ) + expected_asset_count: int = proto.Field( + proto.INT64, + number=9, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + client_version: str = proto.Field( + proto.STRING, + number=11, + ) + guest_os_scan: "GuestOsScan" = proto.Field( + proto.MESSAGE, + number=12, + message="GuestOsScan", + ) + vsphere_scan: "VSphereScan" = proto.Field( + proto.MESSAGE, + number=13, + message="VSphereScan", + ) + collection_days: int = proto.Field( + proto.INT32, + number=14, + ) + eula_uri: str = proto.Field( + proto.STRING, + number=15, + ) + + +class Annotation(proto.Message): + r"""Message describing an Annotation + + Attributes: + name (str): + name of resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp. 
+ labels (MutableMapping[str, str]): + Labels as key value pairs. + type_ (google.cloud.rapidmigrationassessment_v1.types.Annotation.Type): + Type of an annotation. + """ + + class Type(proto.Enum): + r"""Types for project level setting. + + Values: + TYPE_UNSPECIFIED (0): + Unknown type + TYPE_LEGACY_EXPORT_CONSENT (1): + Indicates that this project has opted into + StratoZone export. + TYPE_QWIKLAB (2): + Indicates that this project is created by + Qwiklab. + """ + TYPE_UNSPECIFIED = 0 + TYPE_LEGACY_EXPORT_CONSENT = 1 + TYPE_QWIKLAB = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + type_: Type = proto.Field( + proto.ENUM, + number=5, + enum=Type, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/rapidmigrationassessment.py b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/rapidmigrationassessment.py new file mode 100644 index 000000000000..24b29d6fe6b4 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/google/cloud/rapidmigrationassessment_v1/types/rapidmigrationassessment.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.rapidmigrationassessment_v1.types import api_entities + +__protobuf__ = proto.module( + package="google.cloud.rapidmigrationassessment.v1", + manifest={ + "CreateAnnotationRequest", + "GetAnnotationRequest", + "CreateCollectorRequest", + "ListCollectorsRequest", + "ListCollectorsResponse", + "GetCollectorRequest", + "DeleteCollectorRequest", + "UpdateCollectorRequest", + "ResumeCollectorRequest", + "RegisterCollectorRequest", + "PauseCollectorRequest", + "OperationMetadata", + }, +) + + +class CreateAnnotationRequest(proto.Message): + r"""Message for creating an AnnotationS. + + Attributes: + parent (str): + Required. Name of the parent + (project+location). + annotation (google.cloud.rapidmigrationassessment_v1.types.Annotation): + Required. The resource being created. + request_id (str): + Optional. An optional request ID to identify + requests. 
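+
+    Example (a construction sketch; the parent path is a placeholder):
+
+    .. code-block:: python
+
+        request = CreateAnnotationRequest(
+            parent="projects/my-project/locations/us-central1",
+            annotation=api_entities.Annotation(
+                type_=api_entities.Annotation.Type.TYPE_LEGACY_EXPORT_CONSENT,
+            ),
+        )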
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + annotation: api_entities.Annotation = proto.Field( + proto.MESSAGE, + number=2, + message=api_entities.Annotation, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GetAnnotationRequest(proto.Message): + r"""Message for getting a specific Annotation + + Attributes: + name (str): + Required. Name of the resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCollectorRequest(proto.Message): + r"""Message for creating a Collector. + + Attributes: + parent (str): + Required. Name of the parent + (project+location). + collector_id (str): + Required. Id of the requesting object. + collector (google.cloud.rapidmigrationassessment_v1.types.Collector): + Required. The resource being created. + request_id (str): + Optional. An optional request ID to identify + requests. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + collector_id: str = proto.Field( + proto.STRING, + number=2, + ) + collector: api_entities.Collector = proto.Field( + proto.MESSAGE, + number=3, + message=api_entities.Collector, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListCollectorsRequest(proto.Message): + r"""Message for requesting list of Collectors. + + Attributes: + parent (str): + Required. Parent value for + ListCollectorsRequest. + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results. + order_by (str): + Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListCollectorsResponse(proto.Message): + r"""Message for response to listing Collectors. + + Attributes: + collectors (MutableSequence[google.cloud.rapidmigrationassessment_v1.types.Collector]): + The list of Collectors. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + collectors: MutableSequence[api_entities.Collector] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=api_entities.Collector, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetCollectorRequest(proto.Message): + r"""Message for getting a specific Collector. + + Attributes: + name (str): + Required. Name of the resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteCollectorRequest(proto.Message): + r"""Message for deleting a Collector. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. 
+ For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateCollectorRequest(proto.Message): + r"""Message for updating a Collector. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Collector resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + collector (google.cloud.rapidmigrationassessment_v1.types.Collector): + Required. The resource being updated. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + collector: api_entities.Collector = proto.Field( + proto.MESSAGE, + number=2, + message=api_entities.Collector, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ResumeCollectorRequest(proto.Message): + r"""Message for resuming a Collector. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
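+            A client can generate a suitable value with
+            ``str(uuid.uuid4())`` from the Python standard library.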
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RegisterCollectorRequest(proto.Message): + r"""Message for registering a Collector. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class PauseCollectorRequest(proto.Message): + r"""Message for pausing a Collector. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. 
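The OperationMetadata message above rides along on the long-running operations returned by the mutating RPCs. A sketch of reading it while polling, assuming the standard google.api_core Operation surface (the collector name is hypothetical):

from google.cloud import rapidmigrationassessment_v1

client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()

operation = client.pause_collector(
    request=rapidmigrationassessment_v1.PauseCollectorRequest(
        # Hypothetical resource name.
        name="projects/my-project/locations/us-central1/collectors/my-collector",
    )
)

# The in-flight operation carries the OperationMetadata message defined above.
metadata = operation.metadata
if metadata is not None:
    print(f"{metadata.verb} on {metadata.target}, started {metadata.create_time}")

# Blocks until the operation completes; raises on failure or timeout.
collector = operation.result(timeout=300)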
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-rapidmigrationassessment/mypy.ini b/packages/google-cloud-rapidmigrationassessment/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-rapidmigrationassessment/noxfile.py b/packages/google-cloud-rapidmigrationassessment/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
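The session functions in this noxfile all follow the same shape: install tools into the session virtualenv, then run them. A hypothetical add-on session — not part of the generated file — illustrating that pattern:

import nox


@nox.session(python="3.9")
def typecheck(session):
    """Hypothetical extra session: run mypy over the package sources."""
    session.install("mypy")
    session.install("-e", ".")
    session.run("mypy", "google")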
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-rapidmigrationassessment/renovate.json b/packages/google-cloud-rapidmigrationassessment/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_async.py new file mode 100644 index 000000000000..d30807d96531 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnnotation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateAnnotation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_create_annotation(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.CreateAnnotationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_annotation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateAnnotation_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_sync.py new file mode 100644 index 000000000000..463741812acd --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAnnotation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateAnnotation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_create_annotation(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.CreateAnnotationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_annotation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateAnnotation_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_async.py new file mode 100644 index 000000000000..5a24b5c90157 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateCollector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
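The `*_async` snippets define coroutines, which only execute inside an event loop. A self-contained sketch of driving the async create-annotation flow with asyncio.run — the parent value is hypothetical, and `await operation.result()` is the idiomatic way to wait on the returned async operation:

import asyncio

from google.cloud import rapidmigrationassessment_v1


async def main():
    client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient()
    request = rapidmigrationassessment_v1.CreateAnnotationRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical parent
    )
    operation = await client.create_annotation(request=request)
    response = await operation.result()
    print(response)


# Coroutines need an event loop; asyncio.run supplies one.
asyncio.run(main())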
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_create_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.CreateCollectorRequest( + parent="parent_value", + collector_id="collector_id_value", + ) + + # Make the request + operation = client.create_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateCollector_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_sync.py new file mode 100644 index 000000000000..dd37afa69a29 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateCollector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_create_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.CreateCollectorRequest( + parent="parent_value", + collector_id="collector_id_value", + ) + + # Make the request + operation = client.create_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateCollector_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_async.py new file mode 100644 index 000000000000..b7d19a58de38 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_DeleteCollector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
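The snippet headers repeatedly note that a regional endpoint may need to be specified when creating the client. One way to do that is via client_options; the endpoint string below is illustrative only, not a documented value:

from google.api_core.client_options import ClientOptions
from google.cloud import rapidmigrationassessment_v1

# Illustrative endpoint override; consult the service documentation for the
# real regional endpoint names.
options = ClientOptions(
    api_endpoint="us-central1-rapidmigrationassessment.googleapis.com"
)

client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient(
    client_options=options
)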
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_delete_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.DeleteCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_DeleteCollector_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_sync.py new file mode 100644 index 000000000000..0aa7ff815299 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_DeleteCollector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_delete_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.DeleteCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_DeleteCollector_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_async.py new file mode 100644 index 000000000000..91aa3aa8236c --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAnnotation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetAnnotation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
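Generated client methods also accept per-call retry and timeout arguments, which the samples omit. A sketch using google.api_core's Retry; the tuning values and resource name are illustrative:

from google.api_core.retry import Retry
from google.cloud import rapidmigrationassessment_v1

client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()

operation = client.delete_collector(
    request=rapidmigrationassessment_v1.DeleteCollectorRequest(
        # Hypothetical resource name.
        name="projects/my-project/locations/us-central1/collectors/my-collector",
    ),
    # Illustrative tuning: exponential backoff capped at 10s, give up after 60s.
    retry=Retry(initial=1.0, maximum=10.0, multiplier=2.0, deadline=60.0),
    timeout=60.0,
)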
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_get_annotation(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetAnnotationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_annotation(request=request) + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetAnnotation_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_sync.py new file mode 100644 index 000000000000..f045ee791cc3 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAnnotation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetAnnotation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_get_annotation(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetAnnotationRequest( + name="name_value", + ) + + # Make the request + response = client.get_annotation(request=request) + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetAnnotation_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_async.py new file mode 100644 index 000000000000..e850b651b86d --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetCollector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_get_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetCollectorRequest( + name="name_value", + ) + + # Make the request + response = await client.get_collector(request=request) + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetCollector_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_sync.py new file mode 100644 index 000000000000..3721bdec4e30 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetCollector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_get_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.GetCollectorRequest( + name="name_value", + ) + + # Make the request + response = client.get_collector(request=request) + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetCollector_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_async.py new file mode 100644 index 000000000000..6f16260d23b8 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCollectors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ListCollectors_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_list_collectors(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.ListCollectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collectors(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ListCollectors_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_sync.py new file mode 100644 index 000000000000..f585655c6910 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCollectors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ListCollectors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_list_collectors(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.ListCollectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_collectors(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ListCollectors_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_async.py new file mode 100644 index 000000000000..37ed9841f7fe --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PauseCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_PauseCollector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
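list_collectors returns a pager that fetches pages lazily as you iterate. To work page by page instead of item by item — for example, to surface the `unreachable` locations reported in each ListCollectorsResponse — the standard pager surface looks like this sketch (the parent value is hypothetical):

from google.cloud import rapidmigrationassessment_v1

client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()

pager = client.list_collectors(
    request=rapidmigrationassessment_v1.ListCollectorsRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical parent
        page_size=50,
    )
)

# Each page is a ListCollectorsResponse as defined earlier in this diff.
for page in pager.pages:
    for collector in page.collectors:
        print(collector.name)
    if page.unreachable:
        print("Unreachable locations:", list(page.unreachable))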
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_pause_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.PauseCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.pause_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_PauseCollector_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_sync.py new file mode 100644 index 000000000000..40e2a5fe5e15 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PauseCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_PauseCollector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_pause_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.PauseCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.pause_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_PauseCollector_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_async.py new file mode 100644 index 000000000000..97c05c1a89b7 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RegisterCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_RegisterCollector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
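operation.result() raises if the long-running operation ends in failure, so callers typically wrap it. A sketch using google.api_core's exception hierarchy (the resource name is hypothetical):

from google.api_core.exceptions import GoogleAPICallError
from google.cloud import rapidmigrationassessment_v1

client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()

operation = client.pause_collector(
    request=rapidmigrationassessment_v1.PauseCollectorRequest(
        # Hypothetical resource name.
        name="projects/my-project/locations/us-central1/collectors/my-collector",
    )
)

try:
    response = operation.result(timeout=300)
    print(response)
except GoogleAPICallError as exc:
    # Surfaces the google.rpc.Status carried by the failed operation.
    print(f"PauseCollector failed: {exc}")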
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +async def sample_register_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.RegisterCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.register_collector(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_RegisterCollector_async] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_sync.py new file mode 100644 index 000000000000..a0797a79d31a --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RegisterCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_RegisterCollector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_register_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.RegisterCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.register_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_RegisterCollector_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_async.py new file mode 100644 index 000000000000..e222ad967428 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ResumeCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ResumeCollector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
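A note on the generated samples above: name="name_value" is only a placeholder, and each RPC expects a fully qualified resource name. Below is a minimal sketch of driving the sync register flow end to end, assuming Application Default Credentials are configured and that collector names follow the projects/{project}/locations/{location}/collectors/{collector} pattern (an assumption to confirm against the service documentation):

    from google.cloud import rapidmigrationassessment_v1


    def register_collector(project: str, location: str, collector_id: str):
        # Build the fully qualified resource name the RPC expects.
        name = f"projects/{project}/locations/{location}/collectors/{collector_id}"

        client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()
        operation = client.register_collector(
            request=rapidmigrationassessment_v1.RegisterCollectorRequest(name=name)
        )
        # result() polls the long-running operation and blocks until it finishes.
        return operation.result(timeout=300)

The same shape works for the pause and resume samples; only the request type and method name change.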
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import rapidmigrationassessment_v1


+async def sample_resume_collector():
+    # Create a client
+    client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient()
+
+    # Initialize request argument(s)
+    request = rapidmigrationassessment_v1.ResumeCollectorRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.resume_collector(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ResumeCollector_async]
diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_sync.py
new file mode 100644
index 000000000000..087cb8984213
--- /dev/null
+++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ResumeCollector
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-rapidmigrationassessment


+# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ResumeCollector_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_resume_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.ResumeCollectorRequest( + name="name_value", + ) + + # Make the request + operation = client.resume_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ResumeCollector_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_async.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_async.py new file mode 100644 index 000000000000..f8c43b638b83 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCollector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-rapidmigrationassessment + + +# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_UpdateCollector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import rapidmigrationassessment_v1


+async def sample_update_collector():
+    # Create a client
+    client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient()
+
+    # Initialize request argument(s)
+    request = rapidmigrationassessment_v1.UpdateCollectorRequest(
+    )
+
+    # Make the request
+    operation = await client.update_collector(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_UpdateCollector_async]
diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_sync.py b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_sync.py
new file mode 100644
index 000000000000..65388117a005
--- /dev/null
+++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateCollector
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-rapidmigrationassessment


+# [START rapidmigrationassessment_v1_generated_RapidMigrationAssessment_UpdateCollector_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
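In the update samples (async above, sync below), UpdateCollectorRequest() is constructed with no fields; a real call needs at least the collector being mutated and a field mask naming the fields to change. A hedged sketch of a populated async update (the description field on Collector is assumed from the API surface; verify it against the proto):

    from google.protobuf import field_mask_pb2

    from google.cloud import rapidmigrationassessment_v1


    async def update_collector_description(name: str, description: str):
        client = rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient()

        request = rapidmigrationassessment_v1.UpdateCollectorRequest(
            collector=rapidmigrationassessment_v1.Collector(
                name=name,
                description=description,
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )

        # Both awaits matter here: the async client method returns an
        # AsyncOperation, and AsyncOperation.result() is itself a coroutine.
        operation = await client.update_collector(request=request)
        return await operation.result()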
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import rapidmigrationassessment_v1 + + +def sample_update_collector(): + # Create a client + client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient() + + # Initialize request argument(s) + request = rapidmigrationassessment_v1.UpdateCollectorRequest( + ) + + # Make the request + operation = client.update_collector(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END rapidmigrationassessment_v1_generated_RapidMigrationAssessment_UpdateCollector_sync] diff --git a/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json new file mode 100644 index 000000000000..c385b94c0c47 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json @@ -0,0 +1,1657 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.rapidmigrationassessment.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-rapidmigrationassessment", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.create_annotation", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.CreateAnnotation", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "CreateAnnotation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.CreateAnnotationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "annotation", + "type": "google.cloud.rapidmigrationassessment_v1.types.Annotation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_annotation" + }, + "description": "Sample for CreateAnnotation", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateAnnotation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_async.py" + }, + 
{ + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.create_annotation", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.CreateAnnotation", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "CreateAnnotation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.CreateAnnotationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "annotation", + "type": "google.cloud.rapidmigrationassessment_v1.types.Annotation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_annotation" + }, + "description": "Sample for CreateAnnotation", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateAnnotation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_annotation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.create_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.CreateCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "CreateCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.CreateCollectorRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "collector", + "type": "google.cloud.rapidmigrationassessment_v1.types.Collector" + }, + { + "name": "collector_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_collector" + }, + "description": "Sample for CreateCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateCollector_async", + 
"segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.create_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.CreateCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "CreateCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.CreateCollectorRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "collector", + "type": "google.cloud.rapidmigrationassessment_v1.types.Collector" + }, + { + "name": "collector_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_collector" + }, + "description": "Sample for CreateCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_CreateCollector_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_create_collector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.delete_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.DeleteCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "DeleteCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.DeleteCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_collector" + }, + "description": "Sample for DeleteCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_DeleteCollector_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.delete_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.DeleteCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "DeleteCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.DeleteCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_collector" + }, + "description": "Sample for DeleteCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_DeleteCollector_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_delete_collector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.get_annotation", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.GetAnnotation", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "GetAnnotation" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.rapidmigrationassessment_v1.types.GetAnnotationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.rapidmigrationassessment_v1.types.Annotation", + "shortName": "get_annotation" + }, + "description": "Sample for GetAnnotation", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetAnnotation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.get_annotation", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.GetAnnotation", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "GetAnnotation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.GetAnnotationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.rapidmigrationassessment_v1.types.Annotation", + "shortName": "get_annotation" + }, + "description": "Sample for GetAnnotation", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetAnnotation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_annotation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.get_collector", + "method": { + "fullName": 
"google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.GetCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "GetCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.GetCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.rapidmigrationassessment_v1.types.Collector", + "shortName": "get_collector" + }, + "description": "Sample for GetCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetCollector_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.get_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.GetCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "GetCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.GetCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.rapidmigrationassessment_v1.types.Collector", + "shortName": "get_collector" + }, + "description": "Sample for GetCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_GetCollector_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_get_collector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.list_collectors", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.ListCollectors", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "ListCollectors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.ListCollectorsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.pagers.ListCollectorsAsyncPager", + "shortName": "list_collectors" + }, + "description": "Sample for ListCollectors", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ListCollectors_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.list_collectors", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.ListCollectors", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "ListCollectors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.ListCollectorsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.pagers.ListCollectorsPager", + "shortName": "list_collectors" + }, + "description": "Sample for ListCollectors", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ListCollectors_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_list_collectors_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.pause_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.PauseCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "PauseCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.PauseCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "pause_collector" + }, + "description": "Sample for PauseCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_PauseCollector_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.pause_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.PauseCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "PauseCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.PauseCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "pause_collector" + }, + "description": "Sample for PauseCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_PauseCollector_sync", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_pause_collector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.register_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.RegisterCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "RegisterCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.RegisterCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "register_collector" + }, + "description": "Sample for RegisterCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_RegisterCollector_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.register_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.RegisterCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "RegisterCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.RegisterCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "register_collector" + }, + "description": "Sample for 
RegisterCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_RegisterCollector_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_register_collector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.resume_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.ResumeCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "ResumeCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.ResumeCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "resume_collector" + }, + "description": "Sample for ResumeCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ResumeCollector_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.resume_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.ResumeCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "ResumeCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.ResumeCollectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "resume_collector" + }, + "description": "Sample for ResumeCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_ResumeCollector_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_resume_collector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient", + "shortName": "RapidMigrationAssessmentAsyncClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentAsyncClient.update_collector", + "method": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.UpdateCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "UpdateCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.UpdateCollectorRequest" + }, + { + "name": "collector", + "type": "google.cloud.rapidmigrationassessment_v1.types.Collector" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_collector" + }, + "description": "Sample for UpdateCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_UpdateCollector_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient", + "shortName": "RapidMigrationAssessmentClient" + }, + "fullName": "google.cloud.rapidmigrationassessment_v1.RapidMigrationAssessmentClient.update_collector", + "method": { + "fullName": 
"google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment.UpdateCollector", + "service": { + "fullName": "google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessment", + "shortName": "RapidMigrationAssessment" + }, + "shortName": "UpdateCollector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.rapidmigrationassessment_v1.types.UpdateCollectorRequest" + }, + { + "name": "collector", + "type": "google.cloud.rapidmigrationassessment_v1.types.Collector" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_collector" + }, + "description": "Sample for UpdateCollector", + "file": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "rapidmigrationassessment_v1_generated_RapidMigrationAssessment_UpdateCollector_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "rapidmigrationassessment_v1_generated_rapid_migration_assessment_update_collector_sync.py" + } + ] +} diff --git a/packages/google-cloud-rapidmigrationassessment/scripts/decrypt-secrets.sh b/packages/google-cloud-rapidmigrationassessment/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-rapidmigrationassessment/scripts/fixup_rapidmigrationassessment_v1_keywords.py b/packages/google-cloud-rapidmigrationassessment/scripts/fixup_rapidmigrationassessment_v1_keywords.py new file mode 100644 index 000000000000..581a2520bd6f --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/scripts/fixup_rapidmigrationassessment_v1_keywords.py @@ -0,0 +1,185 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class rapidmigrationassessmentCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_annotation': ('parent', 'annotation', 'request_id', ), + 'create_collector': ('parent', 'collector_id', 'collector', 'request_id', ), + 'delete_collector': ('name', 'request_id', ), + 'get_annotation': ('name', ), + 'get_collector': ('name', ), + 'list_collectors': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'pause_collector': ('name', 'request_id', ), + 'register_collector': ('name', 'request_id', ), + 'resume_collector': ('name', 'request_id', ), + 'update_collector': ('update_mask', 'collector', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )


+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=rapidmigrationassessmentCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)


+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the rapidmigrationassessment client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-rapidmigrationassessment/setup.py b/packages/google-cloud-rapidmigrationassessment/setup.py new file mode 100644 index 000000000000..de155ff92157 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-rapidmigrationassessment" + + +description = "Google Cloud Rapidmigrationassessment API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/rapidmigrationassessment/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-rapidmigrationassessment/testing/.gitignore b/packages/google-cloud-rapidmigrationassessment/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.10.txt b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
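A note on the constraints files added here and below: the test sessions typically install the package with one of them applied (for example, pip install -e . -c testing/constraints-3.7.txt, which is the conventional pattern in this repository's nox sessions). The 3.7 file deliberately pins every dependency to its declared lower bound so that the minimums stated in setup.py are actually exercised, while the files for newer Python versions are left unpinned.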
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.11.txt b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.12.txt b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.7.txt b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.8.txt b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.9.txt b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-rapidmigrationassessment/tests/__init__.py b/packages/google-cloud-rapidmigrationassessment/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-rapidmigrationassessment/tests/unit/__init__.py b/packages/google-cloud-rapidmigrationassessment/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/__init__.py b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/__init__.py b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
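The remaining new files are package __init__ stubs and the generated unit tests. Before the tests, a short usage sketch of the new surface may help orient the reader; it assumes the usual GAPIC top-level re-exports (client and message types available from google.cloud.rapidmigrationassessment_v1), and the project, location, and IDs are illustrative:

```python
# Hypothetical quickstart for the new client; the re-export names follow
# GAPIC conventions and all resource values are illustrative.
from google.cloud import rapidmigrationassessment_v1

client = rapidmigrationassessment_v1.RapidMigrationAssessmentClient()

# create_collector is a long-running operation: the call returns an
# api_core operation future whose result is assumed to be the created
# Collector per the service's LRO metadata.
operation = client.create_collector(
    parent="projects/my-project/locations/us-central1",
    collector_id="my-collector",
    collector=rapidmigrationassessment_v1.Collector(),
)
collector = operation.result()  # blocks until the LRO completes
print(collector.name)

# list_collectors returns a pager that transparently fetches further pages.
for c in client.list_collectors(parent="projects/my-project/locations/us-central1"):
    print(c.name)
```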
+# diff --git a/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py new file mode 100644 index 000000000000..aa13435ab1a0 --- /dev/null +++ b/packages/google-cloud-rapidmigrationassessment/tests/unit/gapic/rapidmigrationassessment_v1/test_rapid_migration_assessment.py @@ -0,0 +1,8392 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment import ( + RapidMigrationAssessmentAsyncClient, + RapidMigrationAssessmentClient, + pagers, + transports, +) +from google.cloud.rapidmigrationassessment_v1.types import ( + api_entities, + rapidmigrationassessment, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
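+# For example, a localhost default endpoint contains no "googleapis.com"
+# suffix to rewrite, so its derived mTLS endpoint would be identical;
+# substituting "foo.googleapis.com" guarantees a distinct
+# "foo.mtls.googleapis.com" for the assertions below.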
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RapidMigrationAssessmentClient._get_default_mtls_endpoint(None) is None + assert ( + RapidMigrationAssessmentClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RapidMigrationAssessmentClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RapidMigrationAssessmentClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RapidMigrationAssessmentClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RapidMigrationAssessmentClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RapidMigrationAssessmentClient, "grpc"), + (RapidMigrationAssessmentAsyncClient, "grpc_asyncio"), + (RapidMigrationAssessmentClient, "rest"), + ], +) +def test_rapid_migration_assessment_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "rapidmigrationassessment.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://rapidmigrationassessment.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RapidMigrationAssessmentGrpcTransport, "grpc"), + (transports.RapidMigrationAssessmentGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RapidMigrationAssessmentRestTransport, "rest"), + ], +) +def test_rapid_migration_assessment_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RapidMigrationAssessmentClient, "grpc"), + (RapidMigrationAssessmentAsyncClient, "grpc_asyncio"), + (RapidMigrationAssessmentClient, "rest"), + ], +) +def test_rapid_migration_assessment_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "rapidmigrationassessment.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://rapidmigrationassessment.googleapis.com" + ) + + +def test_rapid_migration_assessment_client_get_transport_class(): + transport = RapidMigrationAssessmentClient.get_transport_class() + available_transports = [ + transports.RapidMigrationAssessmentGrpcTransport, + transports.RapidMigrationAssessmentRestTransport, + ] + assert transport in available_transports + + transport = RapidMigrationAssessmentClient.get_transport_class("grpc") + assert transport == transports.RapidMigrationAssessmentGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentGrpcTransport, + "grpc", + ), + ( + RapidMigrationAssessmentAsyncClient, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + RapidMigrationAssessmentClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RapidMigrationAssessmentClient), +) +@mock.patch.object( + RapidMigrationAssessmentAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RapidMigrationAssessmentAsyncClient), +) +def test_rapid_migration_assessment_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + RapidMigrationAssessmentClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + RapidMigrationAssessmentClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentGrpcTransport, + "grpc", + "true", + ), + ( + RapidMigrationAssessmentAsyncClient, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentGrpcTransport, + "grpc", + "false", + ), + ( + RapidMigrationAssessmentAsyncClient, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + 
RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentRestTransport, + "rest", + "true", + ), + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + RapidMigrationAssessmentClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RapidMigrationAssessmentClient), +) +@mock.patch.object( + RapidMigrationAssessmentAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RapidMigrationAssessmentAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_rapid_migration_assessment_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
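+ # In "auto" mode the mTLS endpoint is selected only when
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client certificate
+ # source is actually available, either passed explicitly or discovered
+ # via ADC; with neither, the plain default endpoint is used, as this
+ # final case confirms.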
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [RapidMigrationAssessmentClient, RapidMigrationAssessmentAsyncClient], +) +@mock.patch.object( + RapidMigrationAssessmentClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RapidMigrationAssessmentClient), +) +@mock.patch.object( + RapidMigrationAssessmentAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RapidMigrationAssessmentAsyncClient), +) +def test_rapid_migration_assessment_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentGrpcTransport, + "grpc", + ), + ( + RapidMigrationAssessmentAsyncClient, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentRestTransport, + "rest", + ), + ], +) +def test_rapid_migration_assessment_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RapidMigrationAssessmentAsyncClient, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentRestTransport, + "rest", + None, + ), + ], +) +def test_rapid_migration_assessment_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
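+ # Note that the client does not open the file here: the path is simply
+ # forwarded to the transport, and the actual loading happens via
+ # google.auth.load_credentials_from_file (exercised in the channel test
+ # further below).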
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_rapid_migration_assessment_client_client_options_from_dict(): + with mock.patch( + "google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.transports.RapidMigrationAssessmentGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = RapidMigrationAssessmentClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RapidMigrationAssessmentAsyncClient, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_rapid_migration_assessment_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
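+ # The create_channel assertion below also pins the gRPC channel options:
+ # a value of -1 for grpc.max_send_message_length and
+ # grpc.max_receive_message_length removes gRPC's default message-size
+ # caps for this channel.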
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "rapidmigrationassessment.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="rapidmigrationassessment.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.CreateCollectorRequest, + dict, + ], +) +def test_create_collector(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.CreateCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_collector_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_collector), "__call__") as call: + client.create_collector() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.CreateCollectorRequest() + + +@pytest.mark.asyncio +async def test_create_collector_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.CreateCollectorRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_collector), "__call__") as call: + # Designate an appropriate return value for the call. 
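+ # create_collector is a long-running operation: the stub returns a raw
+ # operations_pb2.Operation, which the GAPIC layer wraps in an api_core
+ # future (hence the future.Future isinstance check below).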
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.CreateCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_collector_async_from_dict(): + await test_create_collector_async(request_type=dict) + + +def test_create_collector_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.CreateCollectorRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_collector), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_collector_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.CreateCollectorRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_collector), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_collector_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_collector( + parent="parent_value", + collector=api_entities.Collector(name="name_value"), + collector_id="collector_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].collector + mock_val = api_entities.Collector(name="name_value") + assert arg == mock_val + arg = args[0].collector_id + mock_val = "collector_id_value" + assert arg == mock_val + + +def test_create_collector_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_collector( + rapidmigrationassessment.CreateCollectorRequest(), + parent="parent_value", + collector=api_entities.Collector(name="name_value"), + collector_id="collector_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_collector_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_collector( + parent="parent_value", + collector=api_entities.Collector(name="name_value"), + collector_id="collector_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].collector + mock_val = api_entities.Collector(name="name_value") + assert arg == mock_val + arg = args[0].collector_id + mock_val = "collector_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_collector_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_collector( + rapidmigrationassessment.CreateCollectorRequest(), + parent="parent_value", + collector=api_entities.Collector(name="name_value"), + collector_id="collector_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.CreateAnnotationRequest, + dict, + ], +) +def test_create_annotation(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_annotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_annotation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.CreateAnnotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_annotation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_annotation), "__call__" + ) as call: + client.create_annotation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.CreateAnnotationRequest() + + +@pytest.mark.asyncio +async def test_create_annotation_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.CreateAnnotationRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_annotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_annotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.CreateAnnotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_annotation_async_from_dict(): + await test_create_annotation_async(request_type=dict) + + +def test_create_annotation_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.CreateAnnotationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_annotation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_annotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_annotation_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
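+ # The "x-goog-request-params" entry asserted in these field-header tests
+ # is the routing header: it carries the resource name (here
+ # "parent=parent_value") so server-side routing can key off it without
+ # inspecting the request body.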
+ request = rapidmigrationassessment.CreateAnnotationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_annotation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_annotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_annotation_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_annotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_annotation( + parent="parent_value", + annotation=api_entities.Annotation(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].annotation + mock_val = api_entities.Annotation(name="name_value") + assert arg == mock_val + + +def test_create_annotation_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_annotation( + rapidmigrationassessment.CreateAnnotationRequest(), + parent="parent_value", + annotation=api_entities.Annotation(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_annotation_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_annotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_annotation( + parent="parent_value", + annotation=api_entities.Annotation(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].annotation + mock_val = api_entities.Annotation(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_annotation_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_annotation( + rapidmigrationassessment.CreateAnnotationRequest(), + parent="parent_value", + annotation=api_entities.Annotation(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.GetAnnotationRequest, + dict, + ], +) +def test_get_annotation(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_annotation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = api_entities.Annotation( + name="name_value", + type_=api_entities.Annotation.Type.TYPE_LEGACY_EXPORT_CONSENT, + ) + response = client.get_annotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.GetAnnotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, api_entities.Annotation) + assert response.name == "name_value" + assert response.type_ == api_entities.Annotation.Type.TYPE_LEGACY_EXPORT_CONSENT + + +def test_get_annotation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_annotation), "__call__") as call: + client.get_annotation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.GetAnnotationRequest() + + +@pytest.mark.asyncio +async def test_get_annotation_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.GetAnnotationRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_annotation), "__call__") as call: + # Designate an appropriate return value for the call. 
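+ # FakeUnaryUnaryCall wraps the designated value in an awaitable, standing
+ # in for the grpc.aio call object the async transport would normally
+ # return.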
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + api_entities.Annotation( + name="name_value", + type_=api_entities.Annotation.Type.TYPE_LEGACY_EXPORT_CONSENT, + ) + ) + response = await client.get_annotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.GetAnnotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, api_entities.Annotation) + assert response.name == "name_value" + assert response.type_ == api_entities.Annotation.Type.TYPE_LEGACY_EXPORT_CONSENT + + +@pytest.mark.asyncio +async def test_get_annotation_async_from_dict(): + await test_get_annotation_async(request_type=dict) + + +def test_get_annotation_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.GetAnnotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_annotation), "__call__") as call: + call.return_value = api_entities.Annotation() + client.get_annotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_annotation_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.GetAnnotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_annotation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + api_entities.Annotation() + ) + await client.get_annotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_annotation_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_annotation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = api_entities.Annotation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_annotation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_annotation_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_annotation( + rapidmigrationassessment.GetAnnotationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_annotation_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_annotation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = api_entities.Annotation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + api_entities.Annotation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_annotation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_annotation_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_annotation( + rapidmigrationassessment.GetAnnotationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.ListCollectorsRequest, + dict, + ], +) +def test_list_collectors(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_collectors), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = rapidmigrationassessment.ListCollectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_collectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.ListCollectorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCollectorsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_collectors_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
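+ # With no arguments, the client is expected to send a default-constructed
+ # ListCollectorsRequest, which the assertion on args[0] below confirms.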
+ client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_collectors), "__call__") as call: + client.list_collectors() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.ListCollectorsRequest() + + +@pytest.mark.asyncio +async def test_list_collectors_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.ListCollectorsRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_collectors), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + rapidmigrationassessment.ListCollectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_collectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.ListCollectorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCollectorsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_collectors_async_from_dict(): + await test_list_collectors_async(request_type=dict) + + +def test_list_collectors_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.ListCollectorsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_collectors), "__call__") as call: + call.return_value = rapidmigrationassessment.ListCollectorsResponse() + client.list_collectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_collectors_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.ListCollectorsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_collectors), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ rapidmigrationassessment.ListCollectorsResponse()
+ )
+ await client.list_collectors(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_list_collectors_flattened():
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_collectors), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = rapidmigrationassessment.ListCollectorsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_collectors(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+def test_list_collectors_flattened_error():
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_collectors(
+ rapidmigrationassessment.ListCollectorsRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_collectors_flattened_async():
+ client = RapidMigrationAssessmentAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_collectors), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = rapidmigrationassessment.ListCollectorsResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ rapidmigrationassessment.ListCollectorsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_collectors(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_collectors_flattened_error_async():
+ client = RapidMigrationAssessmentAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_collectors(
+ rapidmigrationassessment.ListCollectorsRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_collectors_pager(transport_name: str = "grpc"):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
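+ # mock's side_effect yields one entry per invocation, so each page fetch
+ # receives the next ListCollectorsResponse; the trailing RuntimeError is a
+ # guard that fails fast if the pager requests more pages than expected.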
+ with mock.patch.object(type(client.transport.list_collectors), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ next_page_token="abc",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[],
+ next_page_token="def",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ ],
+ next_page_token="ghi",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_collectors(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, api_entities.Collector) for i in results)
+
+
+def test_list_collectors_pages(transport_name: str = "grpc"):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_collectors), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ next_page_token="abc",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[],
+ next_page_token="def",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ ],
+ next_page_token="ghi",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_collectors(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_collectors_async_pager():
+ client = RapidMigrationAssessmentAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_collectors), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
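+ # (With new_callable=mock.AsyncMock the mocked __call__ is a coroutine, so
+ # plain response objects in side_effect are awaited directly and no
+ # FakeUnaryUnaryCall wrapper is needed.)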
+ call.side_effect = (
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ next_page_token="abc",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[],
+ next_page_token="def",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ ],
+ next_page_token="ghi",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_collectors(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, api_entities.Collector) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_collectors_async_pages():
+ client = RapidMigrationAssessmentAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_collectors), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ next_page_token="abc",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[],
+ next_page_token="def",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ ],
+ next_page_token="ghi",
+ ),
+ rapidmigrationassessment.ListCollectorsResponse(
+ collectors=[
+ api_entities.Collector(),
+ api_entities.Collector(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (
+ await client.list_collectors(request={})
+ ).pages: # pragma: no branch
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ rapidmigrationassessment.GetCollectorRequest,
+ dict,
+ ],
+)
+def test_get_collector(request_type, transport: str = "grpc"):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_collector), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = api_entities.Collector(
+ name="name_value",
+ display_name="display_name_value",
+ description="description_value",
+ service_account="service_account_value",
+ bucket="bucket_value",
+ expected_asset_count=2137,
+ state=api_entities.Collector.State.STATE_INITIALIZING,
+ client_version="client_version_value",
+ collection_days=1596,
+ eula_uri="eula_uri_value",
+ )
+ response = client.get_collector(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == rapidmigrationassessment.GetCollectorRequest()
+
+ # Establish that the response is the type that we expect.
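+ # proto-plus exposes scalar fields as plain Python values, so the
+ # assertions below can compare strings, ints, and enum members directly.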
+ assert isinstance(response, api_entities.Collector) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.service_account == "service_account_value" + assert response.bucket == "bucket_value" + assert response.expected_asset_count == 2137 + assert response.state == api_entities.Collector.State.STATE_INITIALIZING + assert response.client_version == "client_version_value" + assert response.collection_days == 1596 + assert response.eula_uri == "eula_uri_value" + + +def test_get_collector_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_collector), "__call__") as call: + client.get_collector() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.GetCollectorRequest() + + +@pytest.mark.asyncio +async def test_get_collector_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.GetCollectorRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + api_entities.Collector( + name="name_value", + display_name="display_name_value", + description="description_value", + service_account="service_account_value", + bucket="bucket_value", + expected_asset_count=2137, + state=api_entities.Collector.State.STATE_INITIALIZING, + client_version="client_version_value", + collection_days=1596, + eula_uri="eula_uri_value", + ) + ) + response = await client.get_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.GetCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, api_entities.Collector) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.service_account == "service_account_value" + assert response.bucket == "bucket_value" + assert response.expected_asset_count == 2137 + assert response.state == api_entities.Collector.State.STATE_INITIALIZING + assert response.client_version == "client_version_value" + assert response.collection_days == 1596 + assert response.eula_uri == "eula_uri_value" + + +@pytest.mark.asyncio +async def test_get_collector_async_from_dict(): + await test_get_collector_async(request_type=dict) + + +def test_get_collector_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = rapidmigrationassessment.GetCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_collector), "__call__") as call: + call.return_value = api_entities.Collector() + client.get_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_collector_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.GetCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_collector), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + api_entities.Collector() + ) + await client.get_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_collector_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = api_entities.Collector() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_collector_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_collector( + rapidmigrationassessment.GetCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_collector_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = api_entities.Collector() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + api_entities.Collector() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
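+ # Under the hood the client should coalesce these keyword arguments into
+ # a single GetCollectorRequest before invoking the transport.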
+ response = await client.get_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_collector_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_collector( + rapidmigrationassessment.GetCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.UpdateCollectorRequest, + dict, + ], +) +def test_update_collector(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.UpdateCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_collector_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_collector), "__call__") as call: + client.update_collector() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.UpdateCollectorRequest() + + +@pytest.mark.asyncio +async def test_update_collector_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.UpdateCollectorRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_collector(request) + + # Establish that the underlying gRPC stub method was called. 
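+ # update_collector is a long-running operation, so the awaited result is
+ # an operation future (asserted as future.Future below) rather than the
+ # final Collector resource.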
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.UpdateCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_collector_async_from_dict(): + await test_update_collector_async(request_type=dict) + + +def test_update_collector_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.UpdateCollectorRequest() + + request.collector.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_collector), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "collector.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_collector_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.UpdateCollectorRequest() + + request.collector.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_collector), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "collector.name=name_value", + ) in kw["metadata"] + + +def test_update_collector_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_collector( + collector=api_entities.Collector(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
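+ # Both flattened fields should land on the request: the Collector payload
+ # itself and the FieldMask naming which fields the update touches.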
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].collector + mock_val = api_entities.Collector(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_collector_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_collector( + rapidmigrationassessment.UpdateCollectorRequest(), + collector=api_entities.Collector(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_collector_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_collector( + collector=api_entities.Collector(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].collector + mock_val = api_entities.Collector(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_collector_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_collector( + rapidmigrationassessment.UpdateCollectorRequest(), + collector=api_entities.Collector(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.DeleteCollectorRequest, + dict, + ], +) +def test_delete_collector(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_collector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.DeleteCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_collector_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_collector), "__call__") as call: + client.delete_collector() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.DeleteCollectorRequest() + + +@pytest.mark.asyncio +async def test_delete_collector_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.DeleteCollectorRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.DeleteCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_collector_async_from_dict(): + await test_delete_collector_async(request_type=dict) + + +def test_delete_collector_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.DeleteCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_collector), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_collector_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
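+ # The gapic layer mirrors request.name into the x-goog-request-params
+ # metadata entry so the request can be routed without inspecting the body.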
+ request = rapidmigrationassessment.DeleteCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_collector), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_collector_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_collector_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_collector( + rapidmigrationassessment.DeleteCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_collector_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_collector_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
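+ # The ValueError is raised client-side before any RPC is attempted, which
+ # is why no mock transport is needed for this case.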
+ with pytest.raises(ValueError): + await client.delete_collector( + rapidmigrationassessment.DeleteCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.ResumeCollectorRequest, + dict, + ], +) +def test_resume_collector(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.resume_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.ResumeCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_resume_collector_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_collector), "__call__") as call: + client.resume_collector() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.ResumeCollectorRequest() + + +@pytest.mark.asyncio +async def test_resume_collector_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.ResumeCollectorRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.resume_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.ResumeCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_resume_collector_async_from_dict(): + await test_resume_collector_async(request_type=dict) + + +def test_resume_collector_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = rapidmigrationassessment.ResumeCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_collector), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.resume_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_collector_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.ResumeCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_collector), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.resume_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_resume_collector_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_resume_collector_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_collector( + rapidmigrationassessment.ResumeCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_collector_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_collector), "__call__") as call: + # Designate an appropriate return value for the call. 
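+ # (Two assignments follow; only the second, awaitable FakeUnaryUnaryCall
+ # value is observed by the awaited call, since it overwrites the first.)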
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_resume_collector_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.resume_collector( + rapidmigrationassessment.ResumeCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.RegisterCollectorRequest, + dict, + ], +) +def test_register_collector(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_collector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.register_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.RegisterCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_register_collector_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_collector), "__call__" + ) as call: + client.register_collector() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.RegisterCollectorRequest() + + +@pytest.mark.asyncio +async def test_register_collector_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.RegisterCollectorRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_collector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.register_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.RegisterCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_register_collector_async_from_dict(): + await test_register_collector_async(request_type=dict) + + +def test_register_collector_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.RegisterCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_collector), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.register_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_register_collector_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.RegisterCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_collector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.register_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_register_collector_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_collector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.register_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_register_collector_flattened_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.register_collector( + rapidmigrationassessment.RegisterCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_register_collector_flattened_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_collector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.register_collector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_register_collector_flattened_error_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.register_collector( + rapidmigrationassessment.RegisterCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.PauseCollectorRequest, + dict, + ], +) +def test_pause_collector(request_type, transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.pause_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.PauseCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_pause_collector_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.pause_collector), "__call__") as call: + client.pause_collector() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.PauseCollectorRequest() + + +@pytest.mark.asyncio +async def test_pause_collector_async( + transport: str = "grpc_asyncio", + request_type=rapidmigrationassessment.PauseCollectorRequest, +): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_collector), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.pause_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == rapidmigrationassessment.PauseCollectorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_pause_collector_async_from_dict(): + await test_pause_collector_async(request_type=dict) + + +def test_pause_collector_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.PauseCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_collector), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.pause_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_collector_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = rapidmigrationassessment.PauseCollectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_collector), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.pause_collector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
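+        # GAPIC clients fold routing fields into a single x-goog-request-params
+        # metadata entry, conceptually ("x-goog-request-params", "name=" + request.name).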
+        _, _, kw = call.mock_calls[0]
+        assert (
+            "x-goog-request-params",
+            "name=name_value",
+        ) in kw["metadata"]
+
+
+def test_pause_collector_flattened():
+    client = RapidMigrationAssessmentClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_collector), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.pause_collector(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_pause_collector_flattened_error():
+    client = RapidMigrationAssessmentClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.pause_collector(
+            rapidmigrationassessment.PauseCollectorRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_pause_collector_flattened_async():
+    client = RapidMigrationAssessmentAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_collector), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.pause_collector(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_pause_collector_flattened_error_async():
+    client = RapidMigrationAssessmentAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
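+    # The combination is ambiguous, so the client raises ValueError locally;
+    # no transport mock is needed because nothing ever reaches the stub.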
+ with pytest.raises(ValueError): + await client.pause_collector( + rapidmigrationassessment.PauseCollectorRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.CreateCollectorRequest, + dict, + ], +) +def test_create_collector_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["collector"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "description": "description_value", + "service_account": "service_account_value", + "bucket": "bucket_value", + "expected_asset_count": 2137, + "state": 1, + "client_version": "client_version_value", + "guest_os_scan": {"core_source": "core_source_value"}, + "vsphere_scan": {"core_source": "core_source_value"}, + "collection_days": 1596, + "eula_uri": "eula_uri_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_collector(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_collector_rest_required_fields( + request_type=rapidmigrationassessment.CreateCollectorRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["collector_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "collectorId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "collectorId" in jsonified_request + assert jsonified_request["collectorId"] == request_init["collector_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["collectorId"] = "collector_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_collector._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
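+    # Only query-string fields (collector_id, request_id) may show up as
+    # unset here: "parent" is bound to the URI and "collector" to the body.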
+ assert not set(unset_fields) - set( + ( + "collector_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "collectorId" in jsonified_request + assert jsonified_request["collectorId"] == "collector_id_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_collector(request) + + expected_params = [ + ( + "collectorId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_collector_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_collector._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "collectorId", + "requestId", + ) + ) + & set( + ( + "parent", + "collectorId", + "collector", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_collector_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_create_collector" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_create_collector" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.CreateCollectorRequest.pb( + rapidmigrationassessment.CreateCollectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = rapidmigrationassessment.CreateCollectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_collector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_collector_rest_bad_request( + transport: str = "rest", + request_type=rapidmigrationassessment.CreateCollectorRequest, +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["collector"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "description": "description_value", + "service_account": "service_account_value", + "bucket": "bucket_value", + "expected_asset_count": 2137, + "state": 1, + "client_version": "client_version_value", + "guest_os_scan": {"core_source": "core_source_value"}, + "vsphere_scan": {"core_source": "core_source_value"}, + "collection_days": 1596, + "eula_uri": "eula_uri_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_collector(request) + + +def test_create_collector_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + collector=api_entities.Collector(name="name_value"), + collector_id="collector_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_collector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
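+    # path_template.validate matches the requested URL against the http
+    # rule's URI pattern ("/v1/{parent=projects/*/locations/*}/collectors"),
+    # expanding each {field=template} segment into a regular expression.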
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/collectors" % client.transport._host, + args[1], + ) + + +def test_create_collector_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_collector( + rapidmigrationassessment.CreateCollectorRequest(), + parent="parent_value", + collector=api_entities.Collector(name="name_value"), + collector_id="collector_id_value", + ) + + +def test_create_collector_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.CreateAnnotationRequest, + dict, + ], +) +def test_create_annotation_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["annotation"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_annotation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_annotation_rest_required_fields( + request_type=rapidmigrationassessment.CreateAnnotationRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_annotation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_annotation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_annotation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_annotation_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_annotation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "parent", + "annotation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_annotation_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_create_annotation" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_create_annotation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.CreateAnnotationRequest.pb( + rapidmigrationassessment.CreateAnnotationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = rapidmigrationassessment.CreateAnnotationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] 
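+        # Interceptor contract: pre_* returns the (request, metadata) pair the
+        # transport should actually send, and post_* returns the response that
+        # is handed back to the caller; the stubs below just echo fixed values.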
+ pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_annotation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_annotation_rest_bad_request( + transport: str = "rest", + request_type=rapidmigrationassessment.CreateAnnotationRequest, +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["annotation"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_annotation(request) + + +def test_create_annotation_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + annotation=api_entities.Annotation(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_annotation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/annotations" + % client.transport._host, + args[1], + ) + + +def test_create_annotation_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_annotation( + rapidmigrationassessment.CreateAnnotationRequest(), + parent="parent_value", + annotation=api_entities.Annotation(name="name_value"), + ) + + +def test_create_annotation_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.GetAnnotationRequest, + dict, + ], +) +def test_get_annotation_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/annotations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = api_entities.Annotation( + name="name_value", + type_=api_entities.Annotation.Type.TYPE_LEGACY_EXPORT_CONSENT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = api_entities.Annotation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_annotation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, api_entities.Annotation) + assert response.name == "name_value" + assert response.type_ == api_entities.Annotation.Type.TYPE_LEGACY_EXPORT_CONSENT + + +def test_get_annotation_rest_required_fields( + request_type=rapidmigrationassessment.GetAnnotationRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_annotation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_annotation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = api_entities.Annotation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
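+        # The real transcode() maps a proto request onto an http rule and
+        # returns a dict of the shape
+        #   {"uri": ..., "method": ..., "query_params": ...[, "body": ...]};
+        # the stub below reproduces just that shape for a GET request.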
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = api_entities.Annotation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_annotation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_annotation_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_annotation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_annotation_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_get_annotation" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_get_annotation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.GetAnnotationRequest.pb( + rapidmigrationassessment.GetAnnotationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = api_entities.Annotation.to_json( + api_entities.Annotation() + ) + + request = rapidmigrationassessment.GetAnnotationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = api_entities.Annotation() + + client.get_annotation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_annotation_rest_bad_request( + transport: str = "rest", request_type=rapidmigrationassessment.GetAnnotationRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/annotations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
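+    # A 4xx status on the raw requests.Response is translated by the transport
+    # into the matching google.api_core exception (here, 400 -> BadRequest).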
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_annotation(request) + + +def test_get_annotation_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = api_entities.Annotation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/annotations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = api_entities.Annotation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_annotation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/annotations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_annotation_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_annotation( + rapidmigrationassessment.GetAnnotationRequest(), + name="name_value", + ) + + +def test_get_annotation_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.ListCollectorsRequest, + dict, + ], +) +def test_list_collectors_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
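+        # next_page_token below drives paging; unreachable reports locations
+        # the service could not query.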
+ return_value = rapidmigrationassessment.ListCollectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rapidmigrationassessment.ListCollectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_collectors(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCollectorsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_collectors_rest_required_fields( + request_type=rapidmigrationassessment.ListCollectorsRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_collectors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_collectors._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = rapidmigrationassessment.ListCollectorsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = rapidmigrationassessment.ListCollectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_collectors(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_collectors_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_collectors._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_collectors_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_list_collectors" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_list_collectors" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.ListCollectorsRequest.pb( + rapidmigrationassessment.ListCollectorsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + rapidmigrationassessment.ListCollectorsResponse.to_json( + rapidmigrationassessment.ListCollectorsResponse() + ) + ) + + request = rapidmigrationassessment.ListCollectorsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = rapidmigrationassessment.ListCollectorsResponse() + + client.list_collectors( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_collectors_rest_bad_request( + transport: str = "rest", request_type=rapidmigrationassessment.ListCollectorsRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_collectors(request) + + +def test_list_collectors_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = rapidmigrationassessment.ListCollectorsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = rapidmigrationassessment.ListCollectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_collectors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/collectors" % client.transport._host, + args[1], + ) + + +def test_list_collectors_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_collectors( + rapidmigrationassessment.ListCollectorsRequest(), + parent="parent_value", + ) + + +def test_list_collectors_rest_pager(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + rapidmigrationassessment.ListCollectorsResponse( + collectors=[ + api_entities.Collector(), + api_entities.Collector(), + api_entities.Collector(), + ], + next_page_token="abc", + ), + rapidmigrationassessment.ListCollectorsResponse( + collectors=[], + next_page_token="def", + ), + rapidmigrationassessment.ListCollectorsResponse( + collectors=[ + api_entities.Collector(), + ], + next_page_token="ghi", + ), + rapidmigrationassessment.ListCollectorsResponse( + collectors=[ + api_entities.Collector(), + api_entities.Collector(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + rapidmigrationassessment.ListCollectorsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_collectors(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, api_entities.Collector) for i in results) + + pages = list(client.list_collectors(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.GetCollectorRequest, + dict, + ], +) +def test_get_collector_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = api_entities.Collector( + name="name_value", + display_name="display_name_value", + description="description_value", + service_account="service_account_value", + bucket="bucket_value", + expected_asset_count=2137, + state=api_entities.Collector.State.STATE_INITIALIZING, + client_version="client_version_value", + collection_days=1596, + eula_uri="eula_uri_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = api_entities.Collector.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_collector(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, api_entities.Collector) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.service_account == "service_account_value" + assert response.bucket == "bucket_value" + assert response.expected_asset_count == 2137 + assert response.state == api_entities.Collector.State.STATE_INITIALIZING + assert response.client_version == "client_version_value" + assert response.collection_days == 1596 + assert response.eula_uri == "eula_uri_value" + + +def test_get_collector_rest_required_fields( + request_type=rapidmigrationassessment.GetCollectorRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = api_entities.Collector() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = api_entities.Collector.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_collector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_collector_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_collector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_collector_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_get_collector" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_get_collector" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.GetCollectorRequest.pb( + rapidmigrationassessment.GetCollectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = api_entities.Collector.to_json( + api_entities.Collector() + ) + + request = rapidmigrationassessment.GetCollectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = api_entities.Collector() + + client.get_collector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_collector_rest_bad_request( + transport: str = "rest", request_type=rapidmigrationassessment.GetCollectorRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_collector(request) + + +def test_get_collector_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = api_entities.Collector() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = api_entities.Collector.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_collector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/collectors/*}" % client.transport._host, + args[1], + ) + + +def test_get_collector_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_collector( + rapidmigrationassessment.GetCollectorRequest(), + name="name_value", + ) + + +def test_get_collector_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.UpdateCollectorRequest, + dict, + ], +) +def test_update_collector_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "collector": {"name": "projects/sample1/locations/sample2/collectors/sample3"} + } + request_init["collector"] = { + "name": "projects/sample1/locations/sample2/collectors/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "description": "description_value", + "service_account": "service_account_value", + "bucket": "bucket_value", + "expected_asset_count": 2137, + "state": 1, + "client_version": "client_version_value", + "guest_os_scan": {"core_source": "core_source_value"}, + "vsphere_scan": {"core_source": "core_source_value"}, + "collection_days": 1596, + "eula_uri": "eula_uri_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
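+    # For update, the http rule binds the URI to a nested field
+    # ({collector.name=projects/*/locations/*/collectors/*}), which is why the
+    # sample request above nests the resource name under "collector".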
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_collector(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_collector_rest_required_fields( + request_type=rapidmigrationassessment.UpdateCollectorRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_collector._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "patch",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.update_collector(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_update_collector_rest_unset_required_fields():
+ transport = transports.RapidMigrationAssessmentRestTransport(
+ credentials=ga_credentials.AnonymousCredentials
+ )
+
+ unset_fields = transport.update_collector._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(
+ (
+ "requestId",
+ "updateMask",
+ )
+ )
+ & set(
+ (
+ "updateMask",
+ "collector",
+ )
+ )
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_collector_rest_interceptors(null_interceptor):
+ transport = transports.RapidMigrationAssessmentRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.RapidMigrationAssessmentRestInterceptor(),
+ )
+ client = RapidMigrationAssessmentClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ operation.Operation, "_set_result_from_operation"
+ ), mock.patch.object(
+ transports.RapidMigrationAssessmentRestInterceptor, "post_update_collector"
+ ) as post, mock.patch.object(
+ transports.RapidMigrationAssessmentRestInterceptor, "pre_update_collector"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = rapidmigrationassessment.UpdateCollectorRequest.pb(
+ rapidmigrationassessment.UpdateCollectorRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = json_format.MessageToJson(
+ operations_pb2.Operation()
+ )
+
+ request = rapidmigrationassessment.UpdateCollectorRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+
+ client.update_collector(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_update_collector_rest_bad_request(
+ transport: str = "rest",
+ request_type=rapidmigrationassessment.UpdateCollectorRequest,
+):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "collector": {"name": "projects/sample1/locations/sample2/collectors/sample3"}
+ }
+ request_init["collector"] = {
+ "name": "projects/sample1/locations/sample2/collectors/sample3",
+ "create_time": {"seconds": 751, "nanos": 543},
+ "update_time": {},
+ "labels": {},
+ "display_name": "display_name_value",
+ "description": "description_value",
+ "service_account": "service_account_value",
+ "bucket": "bucket_value",
+ "expected_asset_count": 2137,
+ "state": 1,
+ "client_version": "client_version_value",
+ "guest_os_scan": {"core_source": "core_source_value"},
+ "vsphere_scan": {"core_source": "core_source_value"},
+ "collection_days": 1596,
+ "eula_uri": "eula_uri_value",
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.update_collector(request)
+
+
+def test_update_collector_rest_flattened():
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam")
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {
+ "collector": {
+ "name": "projects/sample1/locations/sample2/collectors/sample3"
+ }
+ }
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ collector=api_entities.Collector(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ client.update_collector(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1/{collector.name=projects/*/locations/*/collectors/*}"
+ % client.transport._host,
+ args[1],
+ )
+
+
+def test_update_collector_rest_flattened_error(transport: str = "rest"):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_collector(
+ rapidmigrationassessment.UpdateCollectorRequest(),
+ collector=api_entities.Collector(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+def test_update_collector_rest_error():
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ rapidmigrationassessment.DeleteCollectorRequest,
+ dict,
+ ],
+)
+def test_delete_collector_rest(request_type):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_collector(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_collector_rest_required_fields( + request_type=rapidmigrationassessment.DeleteCollectorRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_collector._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
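+ # Unlike the update test above, DELETE transcodes to a bodiless HTTP
+ # call, so the stubbed transcode result deliberately has no "body" key.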
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_collector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_collector_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_collector._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_collector_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_delete_collector" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_delete_collector" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.DeleteCollectorRequest.pb( + rapidmigrationassessment.DeleteCollectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = rapidmigrationassessment.DeleteCollectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_collector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_collector_rest_bad_request( + transport: str = "rest", + request_type=rapidmigrationassessment.DeleteCollectorRequest, +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
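+ # A bare 400 status on the fabricated response is all the shared
+ # api-core error mapping needs to raise core_exceptions.BadRequest;
+ # no error body is required.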
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_collector(request) + + +def test_delete_collector_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_collector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/collectors/*}" % client.transport._host, + args[1], + ) + + +def test_delete_collector_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_collector( + rapidmigrationassessment.DeleteCollectorRequest(), + name="name_value", + ) + + +def test_delete_collector_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.ResumeCollectorRequest, + dict, + ], +) +def test_resume_collector_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resume_collector(request) + + # Establish that the response is the type that we expect. 
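+ # resume_collector returns an api-core operation future; response.operation
+ # is the raw longrunning Operation proto that the future wraps.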
+ assert response.operation.name == "operations/spam" + + +def test_resume_collector_rest_required_fields( + request_type=rapidmigrationassessment.ResumeCollectorRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
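+ # Because the request here carries only default (empty) field values,
+ # everything is dropped from the serialized query string, and the $alt
+ # system parameter, pinning JSON with integer enum encoding, is all
+ # that the assertion below expects to remain.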
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resume_collector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resume_collector_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resume_collector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_collector_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_resume_collector" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_resume_collector" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.ResumeCollectorRequest.pb( + rapidmigrationassessment.ResumeCollectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = rapidmigrationassessment.ResumeCollectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.resume_collector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_collector_rest_bad_request( + transport: str = "rest", + request_type=rapidmigrationassessment.ResumeCollectorRequest, +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_collector(request) + + +def test_resume_collector_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resume_collector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/collectors/*}:resume" + % client.transport._host, + args[1], + ) + + +def test_resume_collector_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_collector( + rapidmigrationassessment.ResumeCollectorRequest(), + name="name_value", + ) + + +def test_resume_collector_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.RegisterCollectorRequest, + dict, + ], +) +def test_register_collector_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.register_collector(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_register_collector_rest_required_fields( + request_type=rapidmigrationassessment.RegisterCollectorRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).register_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).register_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
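+ # :register is a custom-method POST that carries the whole request
+ # message as the HTTP body, so the stub mirrors the update test and
+ # sets a "body" entry alongside the query params.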
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.register_collector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_register_collector_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.register_collector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_register_collector_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_register_collector" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_register_collector" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.RegisterCollectorRequest.pb( + rapidmigrationassessment.RegisterCollectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = rapidmigrationassessment.RegisterCollectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.register_collector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_register_collector_rest_bad_request( + transport: str = "rest", + request_type=rapidmigrationassessment.RegisterCollectorRequest, +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.register_collector(request) + + +def test_register_collector_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.register_collector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/collectors/*}:register" + % client.transport._host, + args[1], + ) + + +def test_register_collector_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.register_collector( + rapidmigrationassessment.RegisterCollectorRequest(), + name="name_value", + ) + + +def test_register_collector_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + rapidmigrationassessment.PauseCollectorRequest, + dict, + ], +) +def test_pause_collector_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pause_collector(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_pause_collector_rest_required_fields( + request_type=rapidmigrationassessment.PauseCollectorRequest, +): + transport_class = transports.RapidMigrationAssessmentRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_collector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pause_collector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pause_collector_rest_unset_required_fields(): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pause_collector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pause_collector_rest_interceptors(null_interceptor): + transport = transports.RapidMigrationAssessmentRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RapidMigrationAssessmentRestInterceptor(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "post_pause_collector" + ) as post, mock.patch.object( + transports.RapidMigrationAssessmentRestInterceptor, "pre_pause_collector" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = rapidmigrationassessment.PauseCollectorRequest.pb( + rapidmigrationassessment.PauseCollectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = rapidmigrationassessment.PauseCollectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.pause_collector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pause_collector_rest_bad_request( + transport: str = "rest", request_type=rapidmigrationassessment.PauseCollectorRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/collectors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pause_collector(request) + + +def test_pause_collector_rest_flattened(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pause_collector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/collectors/*}:pause" + % client.transport._host, + args[1], + ) + + +def test_pause_collector_rest_flattened_error(transport: str = "rest"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_collector( + rapidmigrationassessment.PauseCollectorRequest(), + name="name_value", + ) + + +def test_pause_collector_rest_error(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RapidMigrationAssessmentGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RapidMigrationAssessmentGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RapidMigrationAssessmentClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RapidMigrationAssessmentGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RapidMigrationAssessmentClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
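+ # A plain mock.Mock() stands in for client_options.ClientOptions here;
+ # its api_key attribute is what trips the mutual-exclusion check.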
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RapidMigrationAssessmentClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RapidMigrationAssessmentGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RapidMigrationAssessmentClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RapidMigrationAssessmentGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RapidMigrationAssessmentClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.RapidMigrationAssessmentGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RapidMigrationAssessmentGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RapidMigrationAssessmentGrpcTransport, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + transports.RapidMigrationAssessmentRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = RapidMigrationAssessmentClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RapidMigrationAssessmentGrpcTransport, + ) + + +def test_rapid_migration_assessment_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RapidMigrationAssessmentTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_rapid_migration_assessment_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.transports.RapidMigrationAssessmentTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RapidMigrationAssessmentTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
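+ # The base transport is abstract: every generated stub enumerated below
+ # raises NotImplementedError until a concrete gRPC or REST transport
+ # overrides it. The tuple also covers the locations/operations mixins.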
+ methods = ( + "create_collector", + "create_annotation", + "get_annotation", + "list_collectors", + "get_collector", + "update_collector", + "delete_collector", + "resume_collector", + "register_collector", + "pause_collector", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_rapid_migration_assessment_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.transports.RapidMigrationAssessmentTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RapidMigrationAssessmentTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_rapid_migration_assessment_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.rapidmigrationassessment_v1.services.rapid_migration_assessment.transports.RapidMigrationAssessmentTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RapidMigrationAssessmentTransport() + adc.assert_called_once() + + +def test_rapid_migration_assessment_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RapidMigrationAssessmentClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RapidMigrationAssessmentGrpcTransport, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + ], +) +def test_rapid_migration_assessment_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
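+ # Patching google.auth.default simulates Application Default
+ # Credentials; the assertion below checks that user-supplied scopes are
+ # passed through while the cloud-platform scope stays as the default.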
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RapidMigrationAssessmentGrpcTransport, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + transports.RapidMigrationAssessmentRestTransport, + ], +) +def test_rapid_migration_assessment_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.RapidMigrationAssessmentGrpcTransport, grpc_helpers), + (transports.RapidMigrationAssessmentGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_rapid_migration_assessment_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "rapidmigrationassessment.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="rapidmigrationassessment.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RapidMigrationAssessmentGrpcTransport, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + ], +) +def test_rapid_migration_assessment_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
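+ # The callback route: client_cert_source_for_mtls supplies cert and key
+ # bytes, and the transport is expected to feed exactly those into
+ # grpc.ssl_channel_credentials when it builds the channel.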
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
+def test_rapid_migration_assessment_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transports.RapidMigrationAssessmentRestTransport(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_rapid_migration_assessment_rest_lro_client():
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_rapid_migration_assessment_host_no_port(transport_name):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="rapidmigrationassessment.googleapis.com"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "rapidmigrationassessment.googleapis.com:443"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://rapidmigrationassessment.googleapis.com"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+ ],
+)
+def test_rapid_migration_assessment_host_with_port(transport_name):
+ client = RapidMigrationAssessmentClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="rapidmigrationassessment.googleapis.com:8000"
+ ),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ "rapidmigrationassessment.googleapis.com:8000"
+ if transport_name in ["grpc", "grpc_asyncio"]
+ else "https://rapidmigrationassessment.googleapis.com:8000"
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_name",
+ [
+ "rest",
+ ],
+)
+def test_rapid_migration_assessment_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = RapidMigrationAssessmentClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = RapidMigrationAssessmentClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.create_collector._session
+ session2 = client2.transport.create_collector._session
+ assert session1 != session2
+ session1 = client1.transport.create_annotation._session
+ session2 = client2.transport.create_annotation._session
+ assert session1 != session2
+ session1 = client1.transport.get_annotation._session
+ session2 = client2.transport.get_annotation._session
+ assert session1 != session2
+ session1 = client1.transport.list_collectors._session
+ session2 = client2.transport.list_collectors._session
+ assert session1 != session2
+ session1 = client1.transport.get_collector._session
+ session2 = client2.transport.get_collector._session
+ assert session1 != session2
+ session1 = client1.transport.update_collector._session
+ session2 = client2.transport.update_collector._session
+ assert session1 != session2
+ session1 = client1.transport.delete_collector._session
+ session2 = client2.transport.delete_collector._session
+ assert session1 != session2
+ session1 = client1.transport.resume_collector._session
+ session2 = client2.transport.resume_collector._session
+ assert session1 != session2
+ session1 = client1.transport.register_collector._session
+ session2 = client2.transport.register_collector._session
+ assert session1 != session2
+ session1 = client1.transport.pause_collector._session
+ session2 = client2.transport.pause_collector._session
+ assert session1 != session2
+
+
+def test_rapid_migration_assessment_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.RapidMigrationAssessmentGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_rapid_migration_assessment_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.RapidMigrationAssessmentGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
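+# (Until then the deprecated arguments keep working but emit a
+# DeprecationWarning, which the tests below assert via pytest.warns.)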
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.RapidMigrationAssessmentGrpcTransport,
+        transports.RapidMigrationAssessmentGrpcAsyncIOTransport,
+    ],
+)
+def test_rapid_migration_assessment_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.RapidMigrationAssessmentGrpcTransport,
+        transports.RapidMigrationAssessmentGrpcAsyncIOTransport,
+    ],
+)
+def test_rapid_migration_assessment_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_rapid_migration_assessment_grpc_lro_client():
+    client = RapidMigrationAssessmentClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_rapid_migration_assessment_grpc_lro_async_client():
+    client = RapidMigrationAssessmentAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_annotation_path():
+    project = "squid"
+    location = "clam"
+    annotation = "whelk"
+    expected = (
+        "projects/{project}/locations/{location}/annotations/{annotation}".format(
+            project=project,
+            location=location,
+            annotation=annotation,
+        )
+    )
+    actual = RapidMigrationAssessmentClient.annotation_path(
+        project, location, annotation
+    )
+    assert expected == actual
+
+
+def test_parse_annotation_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "annotation": "nudibranch",
+    }
+    path = RapidMigrationAssessmentClient.annotation_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = RapidMigrationAssessmentClient.parse_annotation_path(path)
+    assert expected == actual
+
+
+def test_collector_path():
+    project = "cuttlefish"
+    location = "mussel"
+    collector = "winkle"
+    expected = "projects/{project}/locations/{location}/collectors/{collector}".format(
+        project=project,
+        location=location,
+        collector=collector,
+    )
+    actual = RapidMigrationAssessmentClient.collector_path(project, location, collector)
+    assert expected == actual
+
+
+def test_parse_collector_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "collector": "abalone",
+    }
+    path = RapidMigrationAssessmentClient.collector_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = RapidMigrationAssessmentClient.parse_collector_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = RapidMigrationAssessmentClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = RapidMigrationAssessmentClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = RapidMigrationAssessmentClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = RapidMigrationAssessmentClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = RapidMigrationAssessmentClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = RapidMigrationAssessmentClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RapidMigrationAssessmentClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RapidMigrationAssessmentClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RapidMigrationAssessmentClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RapidMigrationAssessmentClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RapidMigrationAssessmentClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RapidMigrationAssessmentClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RapidMigrationAssessmentClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RapidMigrationAssessmentClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RapidMigrationAssessmentClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RapidMigrationAssessmentTransport, "_prep_wrapped_messages" + ) as prep: + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RapidMigrationAssessmentTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RapidMigrationAssessmentClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
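+    # The REST transport reads the status code off the mocked requests.Session
+    # response and raises the matching google.api_core exception, so the 400
+    # below surfaces to the caller as core_exceptions.BadRequest.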
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
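+    # The routing parameters travel as an "x-goog-request-params" pair in the
+    # metadata keyword argument passed to the stub, not in the request body.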
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
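+        # Each mock.call entry unpacks as (name, args, kwargs); args[0] is the
+        # request message that was forwarded to the stub.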
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = RapidMigrationAssessmentAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+
+def test_get_location_field_headers():
+    client = RapidMigrationAssessmentClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = RapidMigrationAssessmentAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = RapidMigrationAssessmentClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = RapidMigrationAssessmentAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = RapidMigrationAssessmentClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + RapidMigrationAssessmentClient, + transports.RapidMigrationAssessmentGrpcTransport, + ), + ( + RapidMigrationAssessmentAsyncClient, + transports.RapidMigrationAssessmentGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-storageinsights/.OwlBot.yaml b/packages/google-cloud-storageinsights/.OwlBot.yaml new file mode 100644 index 000000000000..69ddb4a95c8e --- /dev/null +++ b/packages/google-cloud-storageinsights/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
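+#
+# The deep-copy-regex entry below tells OwlBot where to stage freshly
+# generated client code: files produced under the versioned
+# /google/cloud/storageinsights source path are copied into this package's
+# owl-bot-staging directory before being merged into the repository.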
+ +deep-copy-regex: + - source: /google/cloud/storageinsights/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-storageinsights/$1 diff --git a/packages/google-cloud-storageinsights/.coveragerc b/packages/google-cloud-storageinsights/.coveragerc new file mode 100644 index 000000000000..f7ef2605d609 --- /dev/null +++ b/packages/google-cloud-storageinsights/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/storageinsights/__init__.py + google/cloud/storageinsights/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-storageinsights/.flake8 b/packages/google-cloud-storageinsights/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-storageinsights/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-storageinsights/.gitignore b/packages/google-cloud-storageinsights/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-storageinsights/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-storageinsights/.repo-metadata.json b/packages/google-cloud-storageinsights/.repo-metadata.json new file mode 100644 index 000000000000..adcb267fa33e --- /dev/null +++ b/packages/google-cloud-storageinsights/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "storageinsights", + "name_pretty": "Storage Insights API", + "api_description": "The Storage Insights inventory report feature helps you manage your object storage at scale.", + "product_documentation": "https://cloud.google.com/storage/docs/insights/storage-insights", + "client_documentation": "https://cloud.google.com/python/docs/reference/storageinsights/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1156610", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-storageinsights", + "api_id": "storageinsights.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "storageinsights" +} diff --git a/packages/google-cloud-storageinsights/CHANGELOG.md b/packages/google-cloud-storageinsights/CHANGELOG.md new file mode 100644 index 000000000000..3e3115d7ee44 --- /dev/null +++ b/packages/google-cloud-storageinsights/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2023-04-19) + + +### Features + +* add initial files for google.cloud.storageinsights.v1 ([#11091](https://github.com/googleapis/google-cloud-python/issues/11091)) ([fc600ad](https://github.com/googleapis/google-cloud-python/commit/fc600adc175d879eb34dda6ef86de28bdb99f27e)) + +## Changelog diff --git a/packages/google-cloud-storageinsights/CODE_OF_CONDUCT.md b/packages/google-cloud-storageinsights/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-storageinsights/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-storageinsights/CONTRIBUTING.rst b/packages/google-cloud-storageinsights/CONTRIBUTING.rst new file mode 100644 index 000000000000..fae8c322d4b5 --- /dev/null +++ b/packages/google-cloud-storageinsights/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. 
Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+  documentation.
+
+- The feature must work fully on the following CPython versions:
+  3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests
+(a minimal sketch of a ``noxfile.py`` unit session appears after the LICENSE file below).
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+    $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+    $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+ If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system- -- -k + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-storageinsights + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py + + +We also explicitly decided to support Python 3 beginning with version 3.7. +Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-storageinsights/LICENSE b/packages/google-cloud-storageinsights/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-storageinsights/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
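As a point of reference for the ``nox`` workflow described in CONTRIBUTING.rst above, the sketch below shows, in broad strokes, how a ``unit`` session can be defined in a package's ``noxfile.py``. This is a hypothetical minimal example, not the repository's actual noxfile; the Python version list and the ``pytest``/``pytest-cov`` dependencies are illustrative assumptions::

    import nox

    # Hypothetical minimal session for illustration only; the package's
    # actual noxfile is generated and more elaborate.
    @nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
    def unit(session):
        """Run the unit test suite (invoked as ``nox -s unit``)."""
        # Install test dependencies, then the package under test.
        session.install("pytest", "pytest-cov")
        session.install("-e", ".")
        # Arguments after ``--`` on the command line (for example,
        # ``-k <name of test>``) arrive here via session.posargs.
        session.run("pytest", "tests/unit", *session.posargs)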
diff --git a/packages/google-cloud-storageinsights/MANIFEST.in b/packages/google-cloud-storageinsights/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-storageinsights/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-storageinsights/README.rst b/packages/google-cloud-storageinsights/README.rst new file mode 100644 index 000000000000..3284b672ca72 --- /dev/null +++ b/packages/google-cloud-storageinsights/README.rst @@ -0,0 +1,107 @@ +Python Client for Storage Insights API +====================================== + +|preview| |pypi| |versions| + +`Storage Insights API`_: The Storage Insights inventory report feature helps you manage your object storage at scale. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-storageinsights.svg + :target: https://pypi.org/project/google-cloud-storageinsights/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-storageinsights.svg + :target: https://pypi.org/project/google-cloud-storageinsights/ +.. _Storage Insights API: https://cloud.google.com/storage/docs/insights/storage-insights +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/storageinsights/latest +.. _Product Documentation: https://cloud.google.com/storage/docs/insights/storage-insights + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Storage Insights API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Storage Insights API.: https://cloud.google.com/storage/docs/insights/storage-insights +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. 
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-storageinsights
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-storageinsights
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Storage Insights API
+  to see other available methods on the client.
+- Read the `Storage Insights API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Storage Insights API Product documentation: https://cloud.google.com/storage/docs/insights/storage-insights
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-storageinsights/SECURITY.md b/packages/google-cloud-storageinsights/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-storageinsights/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and coordinate disclosure here on GitHub using Security Advisories, which let us discuss and fix the issue privately.
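Before the generated sources below, a quick orientation: the following is a minimal usage sketch for the client this package ships. ``StorageInsightsClient`` and ``list_report_configs`` come from this package's own exports and its ``gapic_metadata.json``; the ``projects/{project}/locations/{location}`` parent format, the placeholder argument values, and the assumption that Application Default Credentials are configured are not confirmed by this diff::

    from google.cloud import storageinsights_v1

    def show_report_configs(project_id: str, location: str) -> None:
        # The client picks up Application Default Credentials automatically.
        client = storageinsights_v1.StorageInsightsClient()
        # Assumed resource format; substitute real project and location values.
        parent = f"projects/{project_id}/locations/{location}"
        # list_report_configs returns a pager that lazily fetches each page.
        for report_config in client.list_report_configs(parent=parent):
            print(report_config.name)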
diff --git a/packages/google-cloud-storageinsights/docs/CHANGELOG.md b/packages/google-cloud-storageinsights/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-storageinsights/docs/README.rst b/packages/google-cloud-storageinsights/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-storageinsights/docs/_static/custom.css b/packages/google-cloud-storageinsights/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-storageinsights/docs/_templates/layout.html b/packages/google-cloud-storageinsights/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+          {%- if theme_show_relbar_top|tobool %}
+            <div class="related top">
+              &#160;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+          {%- if theme_show_relbar_bottom|tobool %}
+            <div class="related bottom">
+              &#160;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-storageinsights/docs/conf.py b/packages/google-cloud-storageinsights/docs/conf.py new file mode 100644 index 000000000000..70313b92df82 --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-storageinsights documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-storageinsights" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-storageinsights", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-storageinsights-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-storageinsights.tex", + "google-cloud-storageinsights Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-storageinsights", + "google-cloud-storageinsights Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-storageinsights", + "google-cloud-storageinsights Documentation", + author, + "google-cloud-storageinsights", + "google-cloud-storageinsights Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-storageinsights/docs/index.rst b/packages/google-cloud-storageinsights/docs/index.rst new file mode 100644 index 000000000000..2cbbe0f1e947 --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + storageinsights_v1/services + storageinsights_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-storageinsights`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-storageinsights/docs/multiprocessing.rst b/packages/google-cloud-storageinsights/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-storageinsights/docs/storageinsights_v1/services.rst b/packages/google-cloud-storageinsights/docs/storageinsights_v1/services.rst new file mode 100644 index 000000000000..7e83acde00ce --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/storageinsights_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Storageinsights v1 API +================================================ +.. toctree:: + :maxdepth: 2 + + storage_insights diff --git a/packages/google-cloud-storageinsights/docs/storageinsights_v1/storage_insights.rst b/packages/google-cloud-storageinsights/docs/storageinsights_v1/storage_insights.rst new file mode 100644 index 000000000000..31171ab96163 --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/storageinsights_v1/storage_insights.rst @@ -0,0 +1,10 @@ +StorageInsights +--------------------------------- + +.. automodule:: google.cloud.storageinsights_v1.services.storage_insights + :members: + :inherited-members: + +.. automodule:: google.cloud.storageinsights_v1.services.storage_insights.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-storageinsights/docs/storageinsights_v1/types.rst b/packages/google-cloud-storageinsights/docs/storageinsights_v1/types.rst new file mode 100644 index 000000000000..853a2bed65f9 --- /dev/null +++ b/packages/google-cloud-storageinsights/docs/storageinsights_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Storageinsights v1 API +============================================= + +.. automodule:: google.cloud.storageinsights_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights/__init__.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights/__init__.py new file mode 100644 index 000000000000..5cbc875f402d --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights/__init__.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.storageinsights import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.storageinsights_v1.services.storage_insights.async_client import ( + StorageInsightsAsyncClient, +) +from google.cloud.storageinsights_v1.services.storage_insights.client import ( + StorageInsightsClient, +) +from google.cloud.storageinsights_v1.types.storageinsights import ( + CloudStorageDestinationOptions, + CloudStorageFilters, + CreateReportConfigRequest, + CSVOptions, + DeleteReportConfigRequest, + FrequencyOptions, + GetReportConfigRequest, + GetReportDetailRequest, + ListReportConfigsRequest, + ListReportConfigsResponse, + ListReportDetailsRequest, + ListReportDetailsResponse, + ObjectMetadataReportOptions, + OperationMetadata, + ParquetOptions, + ReportConfig, + ReportDetail, + UpdateReportConfigRequest, +) + +__all__ = ( + "StorageInsightsClient", + "StorageInsightsAsyncClient", + "CloudStorageDestinationOptions", + "CloudStorageFilters", + "CreateReportConfigRequest", + "CSVOptions", + "DeleteReportConfigRequest", + "FrequencyOptions", + "GetReportConfigRequest", + "GetReportDetailRequest", + "ListReportConfigsRequest", + "ListReportConfigsResponse", + "ListReportDetailsRequest", + "ListReportDetailsResponse", + "ObjectMetadataReportOptions", + "OperationMetadata", + "ParquetOptions", + "ReportConfig", + "ReportDetail", + "UpdateReportConfigRequest", +) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights/gapic_version.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights/gapic_version.py new file mode 100644 index 000000000000..aa80f74315ce --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights/py.typed b/packages/google-cloud-storageinsights/google/cloud/storageinsights/py.typed new file mode 100644 index 000000000000..83d187acf779 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-storageinsights package uses inline types. diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/__init__.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/__init__.py new file mode 100644 index 000000000000..782755fafd61 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/__init__.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.storageinsights_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.storage_insights import StorageInsightsAsyncClient, StorageInsightsClient +from .types.storageinsights import ( + CloudStorageDestinationOptions, + CloudStorageFilters, + CreateReportConfigRequest, + CSVOptions, + DeleteReportConfigRequest, + FrequencyOptions, + GetReportConfigRequest, + GetReportDetailRequest, + ListReportConfigsRequest, + ListReportConfigsResponse, + ListReportDetailsRequest, + ListReportDetailsResponse, + ObjectMetadataReportOptions, + OperationMetadata, + ParquetOptions, + ReportConfig, + ReportDetail, + UpdateReportConfigRequest, +) + +__all__ = ( + "StorageInsightsAsyncClient", + "CSVOptions", + "CloudStorageDestinationOptions", + "CloudStorageFilters", + "CreateReportConfigRequest", + "DeleteReportConfigRequest", + "FrequencyOptions", + "GetReportConfigRequest", + "GetReportDetailRequest", + "ListReportConfigsRequest", + "ListReportConfigsResponse", + "ListReportDetailsRequest", + "ListReportDetailsResponse", + "ObjectMetadataReportOptions", + "OperationMetadata", + "ParquetOptions", + "ReportConfig", + "ReportDetail", + "StorageInsightsClient", + "UpdateReportConfigRequest", +) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_metadata.json b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_metadata.json new file mode 100644 index 000000000000..1456239dafb4 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.storageinsights_v1", + "protoPackage": "google.cloud.storageinsights.v1", + "schema": "1.0", + "services": { + "StorageInsights": { + "clients": { + "grpc": { + "libraryClient": "StorageInsightsClient", + "rpcs": { + "CreateReportConfig": { + "methods": [ + "create_report_config" + ] + }, + "DeleteReportConfig": { + "methods": [ + "delete_report_config" + ] + }, + "GetReportConfig": { + "methods": [ + "get_report_config" + ] + }, + "GetReportDetail": { + "methods": [ + "get_report_detail" + ] + }, + "ListReportConfigs": { + "methods": [ + "list_report_configs" + ] + }, + "ListReportDetails": { + "methods": [ + "list_report_details" + ] + }, + "UpdateReportConfig": { + "methods": [ + "update_report_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "StorageInsightsAsyncClient", + "rpcs": { + "CreateReportConfig": { + "methods": [ + "create_report_config" + ] + }, + "DeleteReportConfig": { + "methods": [ + "delete_report_config" + ] + }, + "GetReportConfig": { + "methods": [ + "get_report_config" + ] + }, + "GetReportDetail": { + "methods": [ + "get_report_detail" + ] + }, + "ListReportConfigs": { + "methods": [ + "list_report_configs" + ] + }, + "ListReportDetails": { + "methods": [ + "list_report_details" + ] + }, + "UpdateReportConfig": { + "methods": [ + "update_report_config" + ] + } + } + }, + "rest": 
{ + "libraryClient": "StorageInsightsClient", + "rpcs": { + "CreateReportConfig": { + "methods": [ + "create_report_config" + ] + }, + "DeleteReportConfig": { + "methods": [ + "delete_report_config" + ] + }, + "GetReportConfig": { + "methods": [ + "get_report_config" + ] + }, + "GetReportDetail": { + "methods": [ + "get_report_detail" + ] + }, + "ListReportConfigs": { + "methods": [ + "list_report_configs" + ] + }, + "ListReportDetails": { + "methods": [ + "list_report_details" + ] + }, + "UpdateReportConfig": { + "methods": [ + "update_report_config" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_version.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_version.py new file mode 100644 index 000000000000..aa80f74315ce --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/py.typed b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/py.typed new file mode 100644 index 000000000000..83d187acf779 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-storageinsights package uses inline types. diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/__init__.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/__init__.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/__init__.py new file mode 100644 index 000000000000..0b1e2e4ab9e1 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import StorageInsightsAsyncClient +from .client import StorageInsightsClient + +__all__ = ( + "StorageInsightsClient", + "StorageInsightsAsyncClient", +) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/async_client.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/async_client.py new file mode 100644 index 000000000000..20abff2b4468 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/async_client.py @@ -0,0 +1,1354 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.storageinsights_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore + +from google.cloud.storageinsights_v1.services.storage_insights import pagers +from google.cloud.storageinsights_v1.types import storageinsights + +from .client import StorageInsightsClient +from .transports.base import DEFAULT_CLIENT_INFO, StorageInsightsTransport +from .transports.grpc_asyncio import StorageInsightsGrpcAsyncIOTransport + + +class StorageInsightsAsyncClient: + """Service describing handlers for resources""" + + _client: StorageInsightsClient + + DEFAULT_ENDPOINT = StorageInsightsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = StorageInsightsClient.DEFAULT_MTLS_ENDPOINT + + report_config_path = staticmethod(StorageInsightsClient.report_config_path) + parse_report_config_path = staticmethod( + StorageInsightsClient.parse_report_config_path + ) + report_detail_path = staticmethod(StorageInsightsClient.report_detail_path) + parse_report_detail_path = staticmethod( + StorageInsightsClient.parse_report_detail_path + ) + common_billing_account_path = staticmethod( + StorageInsightsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + StorageInsightsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(StorageInsightsClient.common_folder_path) + parse_common_folder_path = staticmethod( + StorageInsightsClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + StorageInsightsClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + StorageInsightsClient.parse_common_organization_path + ) + common_project_path = staticmethod(StorageInsightsClient.common_project_path) + parse_common_project_path = staticmethod( + StorageInsightsClient.parse_common_project_path + ) + common_location_path = staticmethod(StorageInsightsClient.common_location_path) + parse_common_location_path = staticmethod( + StorageInsightsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StorageInsightsAsyncClient: The constructed client. 
+ """ + return StorageInsightsClient.from_service_account_info.__func__(StorageInsightsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StorageInsightsAsyncClient: The constructed client. + """ + return StorageInsightsClient.from_service_account_file.__func__(StorageInsightsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return StorageInsightsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> StorageInsightsTransport: + """Returns the transport used by the client instance. + + Returns: + StorageInsightsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(StorageInsightsClient).get_transport_class, type(StorageInsightsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, StorageInsightsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the storage insights client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.StorageInsightsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = StorageInsightsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_report_configs( + self, + request: Optional[Union[storageinsights.ListReportConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportConfigsAsyncPager: + r"""Lists ReportConfigs in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + async def sample_list_report_configs(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.storageinsights_v1.types.ListReportConfigsRequest, dict]]): + The request object. Message for requesting list of + ReportConfigs + parent (:class:`str`): + Required. Parent value for + ListReportConfigsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportConfigsAsyncPager: + Message for response to listing + ReportConfigs + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = storageinsights.ListReportConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_report_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListReportConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_report_config( + self, + request: Optional[Union[storageinsights.GetReportConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Gets details of a single ReportConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + async def sample_get_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_report_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storageinsights_v1.types.GetReportConfigRequest, dict]]): + The request object. Message for getting a ReportConfig + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportConfig: + Message describing ReportConfig + object. 
ReportConfig is the + configuration to generate reports. Next + ID: 12 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = storageinsights.GetReportConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_report_config, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_report_config( + self, + request: Optional[ + Union[storageinsights.CreateReportConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + report_config: Optional[storageinsights.ReportConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Creates a new ReportConfig in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + async def sample_create_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.CreateReportConfigRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_report_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storageinsights_v1.types.CreateReportConfigRequest, dict]]): + The request object. Message for creating a ReportConfig + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + report_config (:class:`google.cloud.storageinsights_v1.types.ReportConfig`): + Required. The resource being created + This corresponds to the ``report_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportConfig: + Message describing ReportConfig + object. ReportConfig is the + configuration to generate reports. Next + ID: 12 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, report_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = storageinsights.CreateReportConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if report_config is not None: + request.report_config = report_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_report_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_report_config( + self, + request: Optional[ + Union[storageinsights.UpdateReportConfigRequest, dict] + ] = None, + *, + report_config: Optional[storageinsights.ReportConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Updates the parameters of a single ReportConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + async def sample_update_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.UpdateReportConfigRequest( + ) + + # Make the request + response = await client.update_report_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storageinsights_v1.types.UpdateReportConfigRequest, dict]]): + The request object. Message for updating a ReportConfig + report_config (:class:`google.cloud.storageinsights_v1.types.ReportConfig`): + Required. The resource being updated + This corresponds to the ``report_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. 
Field mask is used to specify the fields to be + overwritten in the ReportConfig resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportConfig: + Message describing ReportConfig + object. ReportConfig is the + configuration to generate reports. Next + ID: 12 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([report_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = storageinsights.UpdateReportConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if report_config is not None: + request.report_config = report_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_report_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("report_config.name", request.report_config.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_report_config( + self, + request: Optional[ + Union[storageinsights.DeleteReportConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single ReportConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + async def sample_delete_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.DeleteReportConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_report_config(request=request) + + Args: + request (Optional[Union[google.cloud.storageinsights_v1.types.DeleteReportConfigRequest, dict]]): + The request object. Message for deleting a ReportConfig + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = storageinsights.DeleteReportConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_report_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_report_details( + self, + request: Optional[Union[storageinsights.ListReportDetailsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportDetailsAsyncPager: + r"""Lists ReportDetails in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + async def sample_list_report_details(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportDetailsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_details(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.storageinsights_v1.types.ListReportDetailsRequest, dict]]): + The request object. Message for requesting list of + ReportDetails + parent (:class:`str`): + Required. Parent value for + ListReportDetailsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportDetailsAsyncPager: + Message for response to listing + ReportDetails + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = storageinsights.ListReportDetailsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_report_details, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListReportDetailsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_report_detail( + self, + request: Optional[Union[storageinsights.GetReportDetailRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportDetail: + r"""Gets details of a single ReportDetail. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + async def sample_get_report_detail(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportDetailRequest( + name="name_value", + ) + + # Make the request + response = await client.get_report_detail(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storageinsights_v1.types.GetReportDetailRequest, dict]]): + The request object. Message for getting a ReportDetail + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportDetail: + Message describing ReportDetail + object. ReportDetail represents metadata + of generated reports for a ReportConfig. + Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = storageinsights.GetReportDetailRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_report_detail, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("StorageInsightsAsyncClient",) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/client.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/client.py new file mode 100644 index 000000000000..d2ffa3978bdc --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/client.py @@ -0,0 +1,1572 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.storageinsights_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore + +from google.cloud.storageinsights_v1.services.storage_insights import pagers +from google.cloud.storageinsights_v1.types import storageinsights + +from .transports.base import DEFAULT_CLIENT_INFO, StorageInsightsTransport +from .transports.grpc import StorageInsightsGrpcTransport +from .transports.grpc_asyncio import StorageInsightsGrpcAsyncIOTransport +from .transports.rest import StorageInsightsRestTransport + + +class StorageInsightsClientMeta(type): + """Metaclass for the StorageInsights client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[StorageInsightsTransport]] + _transport_registry["grpc"] = StorageInsightsGrpcTransport + _transport_registry["grpc_asyncio"] = StorageInsightsGrpcAsyncIOTransport + _transport_registry["rest"] = StorageInsightsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[StorageInsightsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class StorageInsightsClient(metaclass=StorageInsightsClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "storageinsights.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StorageInsightsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StorageInsightsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> StorageInsightsTransport: + """Returns the transport used by the client instance. + + Returns: + StorageInsightsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def report_config_path( + project: str, + location: str, + report_config: str, + ) -> str: + """Returns a fully-qualified report_config string.""" + return "projects/{project}/locations/{location}/reportConfigs/{report_config}".format( + project=project, + location=location, + report_config=report_config, + ) + + @staticmethod + def parse_report_config_path(path: str) -> Dict[str, str]: + """Parses a report_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/reportConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def report_detail_path( + project: str, + location: str, + report_config: str, + report_detail: str, + ) -> str: + """Returns a fully-qualified report_detail string.""" + return "projects/{project}/locations/{location}/reportConfigs/{report_config}/reportDetails/{report_detail}".format( + project=project, + location=location, + report_config=report_config, + report_detail=report_detail, + ) + + @staticmethod + def parse_report_detail_path(path: str) -> Dict[str, str]: + """Parses a report_detail path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/reportConfigs/(?P.+?)/reportDetails/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + 
+ @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, StorageInsightsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the storage insights client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, StorageInsightsTransport]): The + transport to use. If set to None, a transport is chosen + automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, StorageInsightsTransport): + # transport is a StorageInsightsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_report_configs( + self, + request: Optional[Union[storageinsights.ListReportConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportConfigsPager: + r"""Lists ReportConfigs in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + def sample_list_report_configs(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.storageinsights_v1.types.ListReportConfigsRequest, dict]): + The request object. Message for requesting list of + ReportConfigs + parent (str): + Required. Parent value for + ListReportConfigsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportConfigsPager: + Message for response to listing + ReportConfigs + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a storageinsights.ListReportConfigsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
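+        # Both calling forms reach this point (placeholder parent value):
+        #   client.list_report_configs(
+        #       request={"parent": "projects/my-project/locations/us-central1"}
+        #   )
+        #   client.list_report_configs(
+        #       parent="projects/my-project/locations/us-central1"
+        #   )
+        # A dict (or no request at all) is coerced into the proto-plus request below.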
+ if not isinstance(request, storageinsights.ListReportConfigsRequest): + request = storageinsights.ListReportConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_report_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReportConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_report_config( + self, + request: Optional[Union[storageinsights.GetReportConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Gets details of a single ReportConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + def sample_get_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_report_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.storageinsights_v1.types.GetReportConfigRequest, dict]): + The request object. Message for getting a ReportConfig + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportConfig: + Message describing ReportConfig + object. ReportConfig is the + configuration to generate reports. Next + ID: 12 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a storageinsights.GetReportConfigRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, storageinsights.GetReportConfigRequest): + request = storageinsights.GetReportConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_report_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_report_config( + self, + request: Optional[ + Union[storageinsights.CreateReportConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + report_config: Optional[storageinsights.ReportConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Creates a new ReportConfig in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + def sample_create_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.CreateReportConfigRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_report_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.storageinsights_v1.types.CreateReportConfigRequest, dict]): + The request object. Message for creating a ReportConfig + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + report_config (google.cloud.storageinsights_v1.types.ReportConfig): + Required. The resource being created + This corresponds to the ``report_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportConfig: + Message describing ReportConfig + object. ReportConfig is the + configuration to generate reports. Next + ID: 12 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
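+        # Sketch of the flattened calling form, with placeholder values and
+        # assuming a `display_name` field on ReportConfig:
+        #   client.create_report_config(
+        #       parent="projects/my-project/locations/us-central1",
+        #       report_config=storageinsights_v1.ReportConfig(display_name="daily"),
+        #   )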
+ has_flattened_params = any([parent, report_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a storageinsights.CreateReportConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, storageinsights.CreateReportConfigRequest): + request = storageinsights.CreateReportConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if report_config is not None: + request.report_config = report_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_report_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_report_config( + self, + request: Optional[ + Union[storageinsights.UpdateReportConfigRequest, dict] + ] = None, + *, + report_config: Optional[storageinsights.ReportConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Updates the parameters of a single ReportConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + def sample_update_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.UpdateReportConfigRequest( + ) + + # Make the request + response = client.update_report_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.storageinsights_v1.types.UpdateReportConfigRequest, dict]): + The request object. Message for updating a ReportConfig + report_config (google.cloud.storageinsights_v1.types.ReportConfig): + Required. The resource being updated + This corresponds to the ``report_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ReportConfig resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportConfig: + Message describing ReportConfig + object. ReportConfig is the + configuration to generate reports. Next + ID: 12 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([report_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a storageinsights.UpdateReportConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, storageinsights.UpdateReportConfigRequest): + request = storageinsights.UpdateReportConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if report_config is not None: + request.report_config = report_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_report_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("report_config.name", request.report_config.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_report_config( + self, + request: Optional[ + Union[storageinsights.DeleteReportConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a single ReportConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + def sample_delete_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.DeleteReportConfigRequest( + name="name_value", + ) + + # Make the request + client.delete_report_config(request=request) + + Args: + request (Union[google.cloud.storageinsights_v1.types.DeleteReportConfigRequest, dict]): + The request object. Message for deleting a ReportConfig + name (str): + Required. 
Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a storageinsights.DeleteReportConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, storageinsights.DeleteReportConfigRequest): + request = storageinsights.DeleteReportConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_report_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_report_details( + self, + request: Optional[Union[storageinsights.ListReportDetailsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportDetailsPager: + r"""Lists ReportDetails in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + def sample_list_report_details(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportDetailsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_details(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.storageinsights_v1.types.ListReportDetailsRequest, dict]): + The request object. Message for requesting list of + ReportDetails + parent (str): + Required. Parent value for + ListReportDetailsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportDetailsPager: + Message for response to listing + ReportDetails + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a storageinsights.ListReportDetailsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, storageinsights.ListReportDetailsRequest): + request = storageinsights.ListReportDetailsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_report_details] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReportDetailsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_report_detail( + self, + request: Optional[Union[storageinsights.GetReportDetailRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportDetail: + r"""Gets details of a single ReportDetail. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storageinsights_v1 + + def sample_get_report_detail(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportDetailRequest( + name="name_value", + ) + + # Make the request + response = client.get_report_detail(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.storageinsights_v1.types.GetReportDetailRequest, dict]): + The request object. Message for getting a ReportDetail + name (str): + Required. 
Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.storageinsights_v1.types.ReportDetail: + Message describing ReportDetail + object. ReportDetail represents metadata + of generated reports for a ReportConfig. + Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a storageinsights.GetReportDetailRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, storageinsights.GetReportDetailRequest): + request = storageinsights.GetReportDetailRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_report_detail] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "StorageInsightsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
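+        # A plain dict carrying the raw proto's fields is accepted here, e.g.
+        # (placeholder name):
+        #   client.list_operations({"name": "projects/my-project/locations/us-central1"})
+        # It is expanded below via operations_pb2.ListOperationsRequest(**request).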
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("StorageInsightsClient",) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/pagers.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/pagers.py new file mode 100644 index 000000000000..0565ae78d62e --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
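+#
+# The pagers below are returned by the client's list methods. A typical use,
+# with a placeholder parent value, looks like:
+#
+#   pager = client.list_report_configs(
+#       parent="projects/my-project/locations/us-central1"
+#   )
+#   for report_config in pager:   # iterate items across pages
+#       print(report_config.name)
+#   for page in pager.pages:      # or walk whole responses page by page
+#       print(page.next_page_token)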
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.storageinsights_v1.types import storageinsights + + +class ListReportConfigsPager: + """A pager for iterating through ``list_report_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storageinsights_v1.types.ListReportConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``report_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListReportConfigs`` requests and continue to iterate + through the ``report_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storageinsights_v1.types.ListReportConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., storageinsights.ListReportConfigsResponse], + request: storageinsights.ListReportConfigsRequest, + response: storageinsights.ListReportConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storageinsights_v1.types.ListReportConfigsRequest): + The initial request object. + response (google.cloud.storageinsights_v1.types.ListReportConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = storageinsights.ListReportConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[storageinsights.ListReportConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[storageinsights.ReportConfig]: + for page in self.pages: + yield from page.report_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListReportConfigsAsyncPager: + """A pager for iterating through ``list_report_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storageinsights_v1.types.ListReportConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``report_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListReportConfigs`` requests and continue to iterate + through the ``report_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storageinsights_v1.types.ListReportConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[storageinsights.ListReportConfigsResponse]], + request: storageinsights.ListReportConfigsRequest, + response: storageinsights.ListReportConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storageinsights_v1.types.ListReportConfigsRequest): + The initial request object. + response (google.cloud.storageinsights_v1.types.ListReportConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = storageinsights.ListReportConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[storageinsights.ListReportConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[storageinsights.ReportConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.report_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListReportDetailsPager: + """A pager for iterating through ``list_report_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storageinsights_v1.types.ListReportDetailsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``report_details`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListReportDetails`` requests and continue to iterate + through the ``report_details`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storageinsights_v1.types.ListReportDetailsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., storageinsights.ListReportDetailsResponse], + request: storageinsights.ListReportDetailsRequest, + response: storageinsights.ListReportDetailsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storageinsights_v1.types.ListReportDetailsRequest): + The initial request object. + response (google.cloud.storageinsights_v1.types.ListReportDetailsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = storageinsights.ListReportDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[storageinsights.ListReportDetailsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[storageinsights.ReportDetail]: + for page in self.pages: + yield from page.report_details + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListReportDetailsAsyncPager: + """A pager for iterating through ``list_report_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storageinsights_v1.types.ListReportDetailsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``report_details`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListReportDetails`` requests and continue to iterate + through the ``report_details`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storageinsights_v1.types.ListReportDetailsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[storageinsights.ListReportDetailsResponse]], + request: storageinsights.ListReportDetailsRequest, + response: storageinsights.ListReportDetailsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storageinsights_v1.types.ListReportDetailsRequest): + The initial request object. + response (google.cloud.storageinsights_v1.types.ListReportDetailsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = storageinsights.ListReportDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[storageinsights.ListReportDetailsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[storageinsights.ReportDetail]: + async def async_generator(): + async for page in self.pages: + for response in page.report_details: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/__init__.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/__init__.py new file mode 100644 index 000000000000..00b96f606e31 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import StorageInsightsTransport +from .grpc import StorageInsightsGrpcTransport +from .grpc_asyncio import StorageInsightsGrpcAsyncIOTransport +from .rest import StorageInsightsRestInterceptor, StorageInsightsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[StorageInsightsTransport]] +_transport_registry["grpc"] = StorageInsightsGrpcTransport +_transport_registry["grpc_asyncio"] = StorageInsightsGrpcAsyncIOTransport +_transport_registry["rest"] = StorageInsightsRestTransport + +__all__ = ( + "StorageInsightsTransport", + "StorageInsightsGrpcTransport", + "StorageInsightsGrpcAsyncIOTransport", + "StorageInsightsRestTransport", + "StorageInsightsRestInterceptor", +) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/base.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/base.py new file mode 100644 index 000000000000..4c92c35c1097 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/base.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.storageinsights_v1 import gapic_version as package_version +from google.cloud.storageinsights_v1.types import storageinsights + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class StorageInsightsTransport(abc.ABC): + """Abstract transport class for StorageInsights.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "storageinsights.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
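+        # The checks below reject passing both `credentials` and
+        # `credentials_file`; a credentials file, if given, is loaded with
+        # google.auth.load_credentials_from_file(); otherwise Application
+        # Default Credentials are resolved via google.auth.default().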
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply audience if the credentials file is passed from the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.list_report_configs: gapic_v1.method.wrap_method(
+                self.list_report_configs,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_report_config: gapic_v1.method.wrap_method(
+                self.get_report_config,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.create_report_config: gapic_v1.method.wrap_method(
+                self.create_report_config,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.update_report_config: gapic_v1.method.wrap_method(
+                self.update_report_config,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.delete_report_config: gapic_v1.method.wrap_method(
+                self.delete_report_config,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list_report_details: gapic_v1.method.wrap_method(
+                self.list_report_details,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_report_detail: gapic_v1.method.wrap_method(
+                self.get_report_detail,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def list_report_configs( + self, + ) -> Callable[ + [storageinsights.ListReportConfigsRequest], + Union[ + storageinsights.ListReportConfigsResponse, + Awaitable[storageinsights.ListReportConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_report_config( + self, + ) -> Callable[ + [storageinsights.GetReportConfigRequest], + Union[storageinsights.ReportConfig, Awaitable[storageinsights.ReportConfig]], + ]: + raise NotImplementedError() + + @property + def create_report_config( + self, + ) -> Callable[ + [storageinsights.CreateReportConfigRequest], + Union[storageinsights.ReportConfig, Awaitable[storageinsights.ReportConfig]], + ]: + raise NotImplementedError() + + @property + def update_report_config( + self, + ) -> Callable[ + [storageinsights.UpdateReportConfigRequest], + Union[storageinsights.ReportConfig, Awaitable[storageinsights.ReportConfig]], + ]: + raise NotImplementedError() + + @property + def delete_report_config( + self, + ) -> Callable[ + [storageinsights.DeleteReportConfigRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_report_details( + self, + ) -> Callable[ + [storageinsights.ListReportDetailsRequest], + Union[ + storageinsights.ListReportDetailsResponse, + Awaitable[storageinsights.ListReportDetailsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_report_detail( + self, + ) -> Callable[ + [storageinsights.GetReportDetailRequest], + Union[storageinsights.ReportDetail, Awaitable[storageinsights.ReportDetail]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("StorageInsightsTransport",) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/grpc.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/grpc.py new file mode 100644 index 000000000000..33c6ed6c27d0 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/grpc.py @@ -0,0 +1,545 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.storageinsights_v1.types import storageinsights + +from .base import DEFAULT_CLIENT_INFO, StorageInsightsTransport + + +class StorageInsightsGrpcTransport(StorageInsightsTransport): + """gRPC backend transport for StorageInsights. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "storageinsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "storageinsights.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def list_report_configs(
+        self,
+    ) -> Callable[
+        [storageinsights.ListReportConfigsRequest],
+        storageinsights.ListReportConfigsResponse,
+    ]:
+        r"""Return a callable for the list report configs method over gRPC.
+
+        Lists ReportConfigs in a given project and location.
+
+        Returns:
+            Callable[[~.ListReportConfigsRequest],
+                    ~.ListReportConfigsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_report_configs" not in self._stubs:
+            self._stubs["list_report_configs"] = self.grpc_channel.unary_unary(
+                "/google.cloud.storageinsights.v1.StorageInsights/ListReportConfigs",
+                request_serializer=storageinsights.ListReportConfigsRequest.serialize,
+                response_deserializer=storageinsights.ListReportConfigsResponse.deserialize,
+            )
+        return self._stubs["list_report_configs"]
+
+    @property
+    def get_report_config(
+        self,
+    ) -> Callable[
+        [storageinsights.GetReportConfigRequest], storageinsights.ReportConfig
+    ]:
+        r"""Return a callable for the get report config method over gRPC.
+
+        Gets details of a single ReportConfig.
+
+        Returns:
+            Callable[[~.GetReportConfigRequest],
+                    ~.ReportConfig]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_report_config" not in self._stubs: + self._stubs["get_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/GetReportConfig", + request_serializer=storageinsights.GetReportConfigRequest.serialize, + response_deserializer=storageinsights.ReportConfig.deserialize, + ) + return self._stubs["get_report_config"] + + @property + def create_report_config( + self, + ) -> Callable[ + [storageinsights.CreateReportConfigRequest], storageinsights.ReportConfig + ]: + r"""Return a callable for the create report config method over gRPC. + + Creates a new ReportConfig in a given project and + location. + + Returns: + Callable[[~.CreateReportConfigRequest], + ~.ReportConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_report_config" not in self._stubs: + self._stubs["create_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/CreateReportConfig", + request_serializer=storageinsights.CreateReportConfigRequest.serialize, + response_deserializer=storageinsights.ReportConfig.deserialize, + ) + return self._stubs["create_report_config"] + + @property + def update_report_config( + self, + ) -> Callable[ + [storageinsights.UpdateReportConfigRequest], storageinsights.ReportConfig + ]: + r"""Return a callable for the update report config method over gRPC. + + Updates the parameters of a single ReportConfig. + + Returns: + Callable[[~.UpdateReportConfigRequest], + ~.ReportConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_report_config" not in self._stubs: + self._stubs["update_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/UpdateReportConfig", + request_serializer=storageinsights.UpdateReportConfigRequest.serialize, + response_deserializer=storageinsights.ReportConfig.deserialize, + ) + return self._stubs["update_report_config"] + + @property + def delete_report_config( + self, + ) -> Callable[[storageinsights.DeleteReportConfigRequest], empty_pb2.Empty]: + r"""Return a callable for the delete report config method over gRPC. + + Deletes a single ReportConfig. + + Returns: + Callable[[~.DeleteReportConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_report_config" not in self._stubs: + self._stubs["delete_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/DeleteReportConfig", + request_serializer=storageinsights.DeleteReportConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_report_config"] + + @property + def list_report_details( + self, + ) -> Callable[ + [storageinsights.ListReportDetailsRequest], + storageinsights.ListReportDetailsResponse, + ]: + r"""Return a callable for the list report details method over gRPC. + + Lists ReportDetails in a given project and location. + + Returns: + Callable[[~.ListReportDetailsRequest], + ~.ListReportDetailsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_report_details" not in self._stubs: + self._stubs["list_report_details"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/ListReportDetails", + request_serializer=storageinsights.ListReportDetailsRequest.serialize, + response_deserializer=storageinsights.ListReportDetailsResponse.deserialize, + ) + return self._stubs["list_report_details"] + + @property + def get_report_detail( + self, + ) -> Callable[ + [storageinsights.GetReportDetailRequest], storageinsights.ReportDetail + ]: + r"""Return a callable for the get report detail method over gRPC. + + Gets details of a single ReportDetail. + + Returns: + Callable[[~.GetReportDetailRequest], + ~.ReportDetail]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_report_detail" not in self._stubs: + self._stubs["get_report_detail"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/GetReportDetail", + request_serializer=storageinsights.GetReportDetailRequest.serialize, + response_deserializer=storageinsights.ReportDetail.deserialize, + ) + return self._stubs["get_report_detail"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("StorageInsightsGrpcTransport",) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/grpc_asyncio.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a2b4d0f31cdb --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/grpc_asyncio.py @@ -0,0 +1,550 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.storageinsights_v1.types import storageinsights + +from .base import DEFAULT_CLIENT_INFO, StorageInsightsTransport +from .grpc import StorageInsightsGrpcTransport + + +class StorageInsightsGrpcAsyncIOTransport(StorageInsightsTransport): + """gRPC AsyncIO backend transport for StorageInsights. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "storageinsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "storageinsights.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_report_configs( + self, + ) -> Callable[ + [storageinsights.ListReportConfigsRequest], + Awaitable[storageinsights.ListReportConfigsResponse], + ]: + r"""Return a callable for the list report configs method over gRPC. + + Lists ReportConfigs in a given project and location. + + Returns: + Callable[[~.ListReportConfigsRequest], + Awaitable[~.ListReportConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_report_configs" not in self._stubs: + self._stubs["list_report_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/ListReportConfigs", + request_serializer=storageinsights.ListReportConfigsRequest.serialize, + response_deserializer=storageinsights.ListReportConfigsResponse.deserialize, + ) + return self._stubs["list_report_configs"] + + @property + def get_report_config( + self, + ) -> Callable[ + [storageinsights.GetReportConfigRequest], + Awaitable[storageinsights.ReportConfig], + ]: + r"""Return a callable for the get report config method over gRPC. + + Gets details of a single ReportConfig. + + Returns: + Callable[[~.GetReportConfigRequest], + Awaitable[~.ReportConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_report_config" not in self._stubs: + self._stubs["get_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/GetReportConfig", + request_serializer=storageinsights.GetReportConfigRequest.serialize, + response_deserializer=storageinsights.ReportConfig.deserialize, + ) + return self._stubs["get_report_config"] + + @property + def create_report_config( + self, + ) -> Callable[ + [storageinsights.CreateReportConfigRequest], + Awaitable[storageinsights.ReportConfig], + ]: + r"""Return a callable for the create report config method over gRPC. + + Creates a new ReportConfig in a given project and + location. + + Returns: + Callable[[~.CreateReportConfigRequest], + Awaitable[~.ReportConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_report_config" not in self._stubs: + self._stubs["create_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/CreateReportConfig", + request_serializer=storageinsights.CreateReportConfigRequest.serialize, + response_deserializer=storageinsights.ReportConfig.deserialize, + ) + return self._stubs["create_report_config"] + + @property + def update_report_config( + self, + ) -> Callable[ + [storageinsights.UpdateReportConfigRequest], + Awaitable[storageinsights.ReportConfig], + ]: + r"""Return a callable for the update report config method over gRPC. + + Updates the parameters of a single ReportConfig. + + Returns: + Callable[[~.UpdateReportConfigRequest], + Awaitable[~.ReportConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_report_config" not in self._stubs: + self._stubs["update_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/UpdateReportConfig", + request_serializer=storageinsights.UpdateReportConfigRequest.serialize, + response_deserializer=storageinsights.ReportConfig.deserialize, + ) + return self._stubs["update_report_config"] + + @property + def delete_report_config( + self, + ) -> Callable[ + [storageinsights.DeleteReportConfigRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete report config method over gRPC. + + Deletes a single ReportConfig. + + Returns: + Callable[[~.DeleteReportConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_report_config" not in self._stubs: + self._stubs["delete_report_config"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/DeleteReportConfig", + request_serializer=storageinsights.DeleteReportConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_report_config"] + + @property + def list_report_details( + self, + ) -> Callable[ + [storageinsights.ListReportDetailsRequest], + Awaitable[storageinsights.ListReportDetailsResponse], + ]: + r"""Return a callable for the list report details method over gRPC. + + Lists ReportDetails in a given project and location. + + Returns: + Callable[[~.ListReportDetailsRequest], + Awaitable[~.ListReportDetailsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_report_details" not in self._stubs: + self._stubs["list_report_details"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/ListReportDetails", + request_serializer=storageinsights.ListReportDetailsRequest.serialize, + response_deserializer=storageinsights.ListReportDetailsResponse.deserialize, + ) + return self._stubs["list_report_details"] + + @property + def get_report_detail( + self, + ) -> Callable[ + [storageinsights.GetReportDetailRequest], + Awaitable[storageinsights.ReportDetail], + ]: + r"""Return a callable for the get report detail method over gRPC. + + Gets details of a single ReportDetail. + + Returns: + Callable[[~.GetReportDetailRequest], + Awaitable[~.ReportDetail]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_report_detail" not in self._stubs: + self._stubs["get_report_detail"] = self.grpc_channel.unary_unary( + "/google.cloud.storageinsights.v1.StorageInsights/GetReportDetail", + request_serializer=storageinsights.GetReportDetailRequest.serialize, + response_deserializer=storageinsights.ReportDetail.deserialize, + ) + return self._stubs["get_report_detail"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("StorageInsightsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/rest.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/rest.py new file mode 100644 index 000000000000..6584a6eb6170 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/services/storage_insights/transports/rest.py @@ -0,0 +1,1630 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.storageinsights_v1.types import storageinsights + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import StorageInsightsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class StorageInsightsRestInterceptor: + """Interceptor for StorageInsights. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the StorageInsightsRestTransport. + + .. 
code-block:: python + class MyCustomStorageInsightsInterceptor(StorageInsightsRestInterceptor): + def pre_create_report_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_report_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_report_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_report_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_report_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_report_detail(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_report_detail(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_report_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_report_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_report_details(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_report_details(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_report_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_report_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = StorageInsightsRestTransport(interceptor=MyCustomStorageInsightsInterceptor()) + client = StorageInsightsClient(transport=transport) + + + """ + + def pre_create_report_config( + self, + request: storageinsights.CreateReportConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storageinsights.CreateReportConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_report_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_create_report_config( + self, response: storageinsights.ReportConfig + ) -> storageinsights.ReportConfig: + """Post-rpc interceptor for create_report_config + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_delete_report_config( + self, + request: storageinsights.DeleteReportConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storageinsights.DeleteReportConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_report_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def pre_get_report_config( + self, + request: storageinsights.GetReportConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storageinsights.GetReportConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_report_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. 
+ """ + return request, metadata + + def post_get_report_config( + self, response: storageinsights.ReportConfig + ) -> storageinsights.ReportConfig: + """Post-rpc interceptor for get_report_config + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_get_report_detail( + self, + request: storageinsights.GetReportDetailRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storageinsights.GetReportDetailRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_report_detail + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_get_report_detail( + self, response: storageinsights.ReportDetail + ) -> storageinsights.ReportDetail: + """Post-rpc interceptor for get_report_detail + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_list_report_configs( + self, + request: storageinsights.ListReportConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storageinsights.ListReportConfigsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_report_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_list_report_configs( + self, response: storageinsights.ListReportConfigsResponse + ) -> storageinsights.ListReportConfigsResponse: + """Post-rpc interceptor for list_report_configs + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_list_report_details( + self, + request: storageinsights.ListReportDetailsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storageinsights.ListReportDetailsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_report_details + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_list_report_details( + self, response: storageinsights.ListReportDetailsResponse + ) -> storageinsights.ListReportDetailsResponse: + """Post-rpc interceptor for list_report_details + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_update_report_config( + self, + request: storageinsights.UpdateReportConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[storageinsights.UpdateReportConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_report_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_update_report_config( + self, response: storageinsights.ReportConfig + ) -> storageinsights.ReportConfig: + """Post-rpc interceptor for update_report_config + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageInsights server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the StorageInsights server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class StorageInsightsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: StorageInsightsRestInterceptor + + +class StorageInsightsRestTransport(StorageInsightsTransport): + """REST backend transport for StorageInsights. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "storageinsights.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[StorageInsightsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or StorageInsightsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateReportConfig(StorageInsightsRestStub):
+        def __hash__(self):
+            return hash("CreateReportConfig")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: storageinsights.CreateReportConfigRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> storageinsights.ReportConfig:
+            r"""Call the create report config method over HTTP.
+
+            Args:
+                request (~.storageinsights.CreateReportConfigRequest):
+                    The request object. Message for creating a ReportConfig
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.storageinsights.ReportConfig:
+                    Message describing ReportConfig
+                object. ReportConfig is the
+                configuration to generate reports. Next
+                ID: 12
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*}/reportConfigs",
+                    "body": "report_config",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_report_config(
+                request, metadata
+            )
+            pb_request = storageinsights.CreateReportConfigRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storageinsights.ReportConfig() + pb_resp = storageinsights.ReportConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_report_config(resp) + return resp + + class _DeleteReportConfig(StorageInsightsRestStub): + def __hash__(self): + return hash("DeleteReportConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storageinsights.DeleteReportConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete report config method over HTTP. + + Args: + request (~.storageinsights.DeleteReportConfigRequest): + The request object. Message for deleting a ReportConfig + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/reportConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_report_config( + request, metadata + ) + pb_request = storageinsights.DeleteReportConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetReportConfig(StorageInsightsRestStub): + def __hash__(self): + return hash("GetReportConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storageinsights.GetReportConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Call the get report config method over HTTP. + + Args: + request (~.storageinsights.GetReportConfigRequest): + The request object. Message for getting a ReportConfig + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.storageinsights.ReportConfig: + Message describing ReportConfig + object. ReportConfig is the + configuration to generate reports. Next + ID: 12 + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/reportConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_report_config( + request, metadata + ) + pb_request = storageinsights.GetReportConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storageinsights.ReportConfig() + pb_resp = storageinsights.ReportConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_report_config(resp) + return resp + + class _GetReportDetail(StorageInsightsRestStub): + def __hash__(self): + return hash("GetReportDetail") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storageinsights.GetReportDetailRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportDetail: + r"""Call the get report detail method over HTTP. + + Args: + request (~.storageinsights.GetReportDetailRequest): + The request object. Message for getting a ReportDetail + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.storageinsights.ReportDetail: + Message describing ReportDetail + object. ReportDetail represents metadata + of generated reports for a ReportConfig. 
+ Next ID: 10 + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/reportConfigs/*/reportDetails/*}", + }, + ] + request, metadata = self._interceptor.pre_get_report_detail( + request, metadata + ) + pb_request = storageinsights.GetReportDetailRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storageinsights.ReportDetail() + pb_resp = storageinsights.ReportDetail.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_report_detail(resp) + return resp + + class _ListReportConfigs(StorageInsightsRestStub): + def __hash__(self): + return hash("ListReportConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storageinsights.ListReportConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ListReportConfigsResponse: + r"""Call the list report configs method over HTTP. + + Args: + request (~.storageinsights.ListReportConfigsRequest): + The request object. Message for requesting list of + ReportConfigs + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.storageinsights.ListReportConfigsResponse: + Message for response to listing + ReportConfigs + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/reportConfigs", + }, + ] + request, metadata = self._interceptor.pre_list_report_configs( + request, metadata + ) + pb_request = storageinsights.ListReportConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storageinsights.ListReportConfigsResponse() + pb_resp = storageinsights.ListReportConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_report_configs(resp) + return resp + + class _ListReportDetails(StorageInsightsRestStub): + def __hash__(self): + return hash("ListReportDetails") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storageinsights.ListReportDetailsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ListReportDetailsResponse: + r"""Call the list report details method over HTTP. + + Args: + request (~.storageinsights.ListReportDetailsRequest): + The request object. Message for requesting list of + ReportDetails + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.storageinsights.ListReportDetailsResponse: + Message for response to listing + ReportDetails + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/reportConfigs/*}/reportDetails", + }, + ] + request, metadata = self._interceptor.pre_list_report_details( + request, metadata + ) + pb_request = storageinsights.ListReportDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storageinsights.ListReportDetailsResponse() + pb_resp = storageinsights.ListReportDetailsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_report_details(resp) + return resp + + class _UpdateReportConfig(StorageInsightsRestStub): + def __hash__(self): + return hash("UpdateReportConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storageinsights.UpdateReportConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storageinsights.ReportConfig: + r"""Call the update report config method over HTTP. + + Args: + request (~.storageinsights.UpdateReportConfigRequest): + The request object. Message for updating a ReportConfig + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.storageinsights.ReportConfig: + Message describing ReportConfig + object. ReportConfig is the + configuration to generate reports. 
Next + ID: 12 + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{report_config.name=projects/*/locations/*/reportConfigs/*}", + "body": "report_config", + }, + ] + request, metadata = self._interceptor.pre_update_report_config( + request, metadata + ) + pb_request = storageinsights.UpdateReportConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storageinsights.ReportConfig() + pb_resp = storageinsights.ReportConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_report_config(resp) + return resp + + @property + def create_report_config( + self, + ) -> Callable[ + [storageinsights.CreateReportConfigRequest], storageinsights.ReportConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateReportConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_report_config( + self, + ) -> Callable[[storageinsights.DeleteReportConfigRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteReportConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_report_config( + self, + ) -> Callable[ + [storageinsights.GetReportConfigRequest], storageinsights.ReportConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetReportConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_report_detail( + self, + ) -> Callable[ + [storageinsights.GetReportDetailRequest], storageinsights.ReportDetail + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetReportDetail(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_report_configs( + self, + ) -> Callable[ + [storageinsights.ListReportConfigsRequest], + storageinsights.ListReportConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReportConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_report_details( + self, + ) -> Callable[ + [storageinsights.ListReportDetailsRequest], + storageinsights.ListReportDetailsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReportDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_report_config( + self, + ) -> Callable[ + [storageinsights.UpdateReportConfigRequest], storageinsights.ReportConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateReportConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(StorageInsightsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(StorageInsightsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(StorageInsightsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(StorageInsightsRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(StorageInsightsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(StorageInsightsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("StorageInsightsRestTransport",) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/types/__init__.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/types/__init__.py new file mode 100644 index 000000000000..7b4add048011 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/types/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .storageinsights import ( + CloudStorageDestinationOptions, + CloudStorageFilters, + CreateReportConfigRequest, + CSVOptions, + DeleteReportConfigRequest, + FrequencyOptions, + GetReportConfigRequest, + GetReportDetailRequest, + ListReportConfigsRequest, + ListReportConfigsResponse, + ListReportDetailsRequest, + ListReportDetailsResponse, + ObjectMetadataReportOptions, + OperationMetadata, + ParquetOptions, + ReportConfig, + ReportDetail, + UpdateReportConfigRequest, +) + +__all__ = ( + "CloudStorageDestinationOptions", + "CloudStorageFilters", + "CreateReportConfigRequest", + "CSVOptions", + "DeleteReportConfigRequest", + "FrequencyOptions", + "GetReportConfigRequest", + "GetReportDetailRequest", + "ListReportConfigsRequest", + "ListReportConfigsResponse", + "ListReportDetailsRequest", + "ListReportDetailsResponse", + "ObjectMetadataReportOptions", + "OperationMetadata", + "ParquetOptions", + "ReportConfig", + "ReportDetail", + "UpdateReportConfigRequest", +) diff --git a/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/types/storageinsights.py b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/types/storageinsights.py new file mode 100644 index 000000000000..a2545dfde6c1 --- /dev/null +++ b/packages/google-cloud-storageinsights/google/cloud/storageinsights_v1/types/storageinsights.py @@ -0,0 +1,758 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.storageinsights.v1", + manifest={ + "ListReportConfigsRequest", + "ListReportConfigsResponse", + "GetReportConfigRequest", + "CreateReportConfigRequest", + "UpdateReportConfigRequest", + "DeleteReportConfigRequest", + "ReportDetail", + "ListReportDetailsRequest", + "ListReportDetailsResponse", + "GetReportDetailRequest", + "OperationMetadata", + "FrequencyOptions", + "CSVOptions", + "ParquetOptions", + "CloudStorageFilters", + "CloudStorageDestinationOptions", + "ObjectMetadataReportOptions", + "ReportConfig", + }, +) + + +class ListReportConfigsRequest(proto.Message): + r"""Message for requesting list of ReportConfigs + + Attributes: + parent (str): + Required. Parent value for + ListReportConfigsRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. 
+ filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListReportConfigsResponse(proto.Message): + r"""Message for response to listing ReportConfigs + + Attributes: + report_configs (MutableSequence[google.cloud.storageinsights_v1.types.ReportConfig]): + The list of ReportConfig + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + report_configs: MutableSequence["ReportConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ReportConfig", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetReportConfigRequest(proto.Message): + r"""Message for getting a ReportConfig + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateReportConfigRequest(proto.Message): + r"""Message for creating a ReportConfig + + Attributes: + parent (str): + Required. Value for parent. + report_config (google.cloud.storageinsights_v1.types.ReportConfig): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + report_config: "ReportConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ReportConfig", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateReportConfigRequest(proto.Message): + r"""Message for updating a ReportConfig + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ReportConfig resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + report_config (google.cloud.storageinsights_v1.types.ReportConfig): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. 
The server will guarantee that + for at least 60 minutes since the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + report_config: "ReportConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ReportConfig", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteReportConfigRequest(proto.Message): + r"""Message for deleting a ReportConfig + + Attributes: + name (str): + Required. Name of the resource + force (bool): + Optional. If set, all ReportDetails for this + ReportConfig will be deleted. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ReportDetail(proto.Message): + r"""Message describing ReportDetail object. ReportDetail + represents metadata of generated reports for a ReportConfig. + Next ID: 10 + + Attributes: + name (str): + Name of resource. It will be of form + projects//locations//reportConfigs//reportDetails/. + snapshot_time (google.protobuf.timestamp_pb2.Timestamp): + The snapshot time. + All the report data is referenced at this point + of time. + report_path_prefix (str): + Prefix of the object name of each report's shard. This will + have full prefix except the "extension" and "shard_id". For + example, if the ``destination_path`` is + ``{{report-config-id}}/dt={{datetime}}``, the shard object + name would be + ``gs://my-insights/1A34-F2E456-12B456-1C3D/dt=2022-05-20T06:35/1A34-F2E456-12B456-1C3D_2022-05-20T06:35_5.csv`` + and the value of ``report_path_prefix`` field would be + ``gs://my-insights/1A34-F2E456-12B456-1C3D/dt=2022-05-20T06:35/1A34-F2E456-12B456-1C3D_2022-05-20T06:35_``. + shards_count (int): + Total shards generated for the report. + status (google.rpc.status_pb2.Status): + Status of the ReportDetail. + labels (MutableMapping[str, str]): + Labels as key value pairs + target_datetime (google.type.datetime_pb2.DateTime): + The date for which report is generated. The time part of + target_datetime will be zero till we support multiple + reports per day. 
+ report_metrics (google.cloud.storageinsights_v1.types.ReportDetail.Metrics): + Metrics of the report. + """ + + class Metrics(proto.Message): + r"""Different metrics associated with the generated report. + + Attributes: + processed_records_count (int): + Count of Cloud Storage objects which are part + of the report. + """ + + processed_records_count: int = proto.Field( + proto.INT64, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + report_path_prefix: str = proto.Field( + proto.STRING, + number=8, + ) + shards_count: int = proto.Field( + proto.INT64, + number=9, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + target_datetime: datetime_pb2.DateTime = proto.Field( + proto.MESSAGE, + number=6, + message=datetime_pb2.DateTime, + ) + report_metrics: Metrics = proto.Field( + proto.MESSAGE, + number=7, + message=Metrics, + ) + + +class ListReportDetailsRequest(proto.Message): + r"""Message for requesting list of ReportDetails + + Attributes: + parent (str): + Required. Parent value for + ListReportDetailsRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListReportDetailsResponse(proto.Message): + r"""Message for response to listing ReportDetails + + Attributes: + report_details (MutableSequence[google.cloud.storageinsights_v1.types.ReportDetail]): + The list of ReportDetail + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + report_details: MutableSequence["ReportDetail"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ReportDetail", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetReportDetailRequest(proto.Message): + r"""Message for getting a ReportDetail + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. 
Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class FrequencyOptions(proto.Message): + r"""ReportConfig Resource: + Options to setup frequency of report generation. + + Attributes: + frequency (google.cloud.storageinsights_v1.types.FrequencyOptions.Frequency): + Frequency of report generation. + start_date (google.type.date_pb2.Date): + The date from which report generation should + start. UTC time zone. + end_date (google.type.date_pb2.Date): + The date on which report generation should + stop (Inclusive). UTC time zone. + """ + + class Frequency(proto.Enum): + r"""This ENUM specifies possible frequencies of report + generation. + + Values: + FREQUENCY_UNSPECIFIED (0): + Unspecified. + DAILY (1): + Report will be generated daily. + WEEKLY (2): + Report will be generated weekly. + """ + FREQUENCY_UNSPECIFIED = 0 + DAILY = 1 + WEEKLY = 2 + + frequency: Frequency = proto.Field( + proto.ENUM, + number=1, + enum=Frequency, + ) + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=3, + message=date_pb2.Date, + ) + + +class CSVOptions(proto.Message): + r"""Options to configure CSV formatted reports. + + Attributes: + record_separator (str): + Record separator characters in CSV. + delimiter (str): + Delimiter characters in CSV. + header_required (bool): + If set, will include a header row in the CSV + report. + """ + + record_separator: str = proto.Field( + proto.STRING, + number=1, + ) + delimiter: str = proto.Field( + proto.STRING, + number=2, + ) + header_required: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ParquetOptions(proto.Message): + r"""Options to configure Parquet formatted reports.""" + + +class CloudStorageFilters(proto.Message): + r"""Options to filter data on storage systems. + Next ID: 2 + + Attributes: + bucket (str): + Bucket for which the report will be + generated. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudStorageDestinationOptions(proto.Message): + r"""Options to store reports in storage systems. + Next ID: 3 + + Attributes: + bucket (str): + Destination bucket. + destination_path (str): + Destination path is the path in the bucket + where the report should be generated. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + destination_path: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ObjectMetadataReportOptions(proto.Message): + r"""Report specification for exporting object metadata. + Next ID: 4 + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metadata_fields (MutableSequence[str]): + Metadata fields to be included in the report. + storage_filters (google.cloud.storageinsights_v1.types.CloudStorageFilters): + Cloud Storage as the storage system. + + This field is a member of `oneof`_ ``filter``. + storage_destination_options (google.cloud.storageinsights_v1.types.CloudStorageDestinationOptions): + Cloud Storage as the storage system. + + This field is a member of `oneof`_ ``destination_options``. + """ + + metadata_fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + storage_filters: "CloudStorageFilters" = proto.Field( + proto.MESSAGE, + number=2, + oneof="filter", + message="CloudStorageFilters", + ) + storage_destination_options: "CloudStorageDestinationOptions" = proto.Field( + proto.MESSAGE, + number=3, + oneof="destination_options", + message="CloudStorageDestinationOptions", + ) + + +class ReportConfig(proto.Message): + r"""Message describing ReportConfig object. ReportConfig is the + configuration to generate reports. + Next ID: 12 + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + name of resource. It will be of form + projects//locations//reportConfigs/. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. [Output only] Update time stamp + frequency_options (google.cloud.storageinsights_v1.types.FrequencyOptions): + The frequency of report generation. + csv_options (google.cloud.storageinsights_v1.types.CSVOptions): + Options for CSV formatted reports. + + This field is a member of `oneof`_ ``report_format``. + parquet_options (google.cloud.storageinsights_v1.types.ParquetOptions): + Options for Parquet formatted reports. + + This field is a member of `oneof`_ ``report_format``. + object_metadata_report_options (google.cloud.storageinsights_v1.types.ObjectMetadataReportOptions): + Report for exporting object metadata. + + This field is a member of `oneof`_ ``report_kind``. + labels (MutableMapping[str, str]): + Labels as key value pairs + display_name (str): + User provided display name which can be empty + and limited to 256 characters that is editable. 
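+
+    Example (illustrative only; all field values here are hypothetical)::
+
+        from google.cloud import storageinsights_v1
+
+        config = storageinsights_v1.ReportConfig(
+            display_name="daily-object-metadata",
+            csv_options=storageinsights_v1.CSVOptions(delimiter=","),
+        )
+        # csv_options and parquet_options are members of the report_format
+        # oneof, so assigning one automatically clears the other.
+        config.parquet_options = storageinsights_v1.ParquetOptions()
+        assert "csv_options" not in config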
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + frequency_options: "FrequencyOptions" = proto.Field( + proto.MESSAGE, + number=5, + message="FrequencyOptions", + ) + csv_options: "CSVOptions" = proto.Field( + proto.MESSAGE, + number=6, + oneof="report_format", + message="CSVOptions", + ) + parquet_options: "ParquetOptions" = proto.Field( + proto.MESSAGE, + number=7, + oneof="report_format", + message="ParquetOptions", + ) + object_metadata_report_options: "ObjectMetadataReportOptions" = proto.Field( + proto.MESSAGE, + number=8, + oneof="report_kind", + message="ObjectMetadataReportOptions", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + display_name: str = proto.Field( + proto.STRING, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-storageinsights/mypy.ini b/packages/google-cloud-storageinsights/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-storageinsights/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-storageinsights/noxfile.py b/packages/google-cloud-storageinsights/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-storageinsights/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. 
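+    # The constraints file passed via ``-c`` is keyed by interpreter version
+    # (testing/constraints-<session.python>.txt); on the oldest supported
+    # interpreter it pins the declared dependency minimums, as described in
+    # prerelease_deps below.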
+ + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
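+
+    The session fails if the aggregated coverage is below 100%
+    (``--fail-under=100``).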
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-storageinsights/renovate.json b/packages/google-cloud-storageinsights/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-storageinsights/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json b/packages/google-cloud-storageinsights/samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json new file mode 100644 index 000000000000..7411c6d3c4bf --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json @@ -0,0 +1,1152 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.storageinsights.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-storageinsights", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient", + "shortName": "StorageInsightsAsyncClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient.create_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.CreateReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "CreateReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.CreateReportConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "report_config", + "type": "google.cloud.storageinsights_v1.types.ReportConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.storageinsights_v1.types.ReportConfig", + "shortName": "create_report_config" + }, + "description": "Sample for CreateReportConfig", + "file": "storageinsights_v1_generated_storage_insights_create_report_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_CreateReportConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_create_report_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient", + "shortName": "StorageInsightsClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient.create_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.CreateReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "CreateReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.CreateReportConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "report_config", + "type": "google.cloud.storageinsights_v1.types.ReportConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.types.ReportConfig", + "shortName": "create_report_config" + }, + "description": "Sample for CreateReportConfig", + "file": "storageinsights_v1_generated_storage_insights_create_report_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_CreateReportConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_create_report_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient", + "shortName": "StorageInsightsAsyncClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient.delete_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.DeleteReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "DeleteReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.DeleteReportConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_report_config" + }, + "description": "Sample for DeleteReportConfig", + "file": "storageinsights_v1_generated_storage_insights_delete_report_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_DeleteReportConfig_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_delete_report_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient", + "shortName": "StorageInsightsClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient.delete_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.DeleteReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "DeleteReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.DeleteReportConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_report_config" + }, + "description": "Sample for DeleteReportConfig", + "file": "storageinsights_v1_generated_storage_insights_delete_report_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_DeleteReportConfig_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_delete_report_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient", + "shortName": "StorageInsightsAsyncClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient.get_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.GetReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "GetReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.GetReportConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.types.ReportConfig", + 
"shortName": "get_report_config" + }, + "description": "Sample for GetReportConfig", + "file": "storageinsights_v1_generated_storage_insights_get_report_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_GetReportConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_get_report_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient", + "shortName": "StorageInsightsClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient.get_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.GetReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "GetReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.GetReportConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.types.ReportConfig", + "shortName": "get_report_config" + }, + "description": "Sample for GetReportConfig", + "file": "storageinsights_v1_generated_storage_insights_get_report_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_GetReportConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_get_report_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient", + "shortName": "StorageInsightsAsyncClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient.get_report_detail", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.GetReportDetail", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "GetReportDetail" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.GetReportDetailRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.types.ReportDetail", + "shortName": "get_report_detail" + 
}, + "description": "Sample for GetReportDetail", + "file": "storageinsights_v1_generated_storage_insights_get_report_detail_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_GetReportDetail_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_get_report_detail_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient", + "shortName": "StorageInsightsClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient.get_report_detail", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.GetReportDetail", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "GetReportDetail" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.GetReportDetailRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.types.ReportDetail", + "shortName": "get_report_detail" + }, + "description": "Sample for GetReportDetail", + "file": "storageinsights_v1_generated_storage_insights_get_report_detail_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_GetReportDetail_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_get_report_detail_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient", + "shortName": "StorageInsightsAsyncClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient.list_report_configs", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.ListReportConfigs", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "ListReportConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.ListReportConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportConfigsAsyncPager", + "shortName": 
"list_report_configs" + }, + "description": "Sample for ListReportConfigs", + "file": "storageinsights_v1_generated_storage_insights_list_report_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_ListReportConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_list_report_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient", + "shortName": "StorageInsightsClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient.list_report_configs", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.ListReportConfigs", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "ListReportConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.ListReportConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportConfigsPager", + "shortName": "list_report_configs" + }, + "description": "Sample for ListReportConfigs", + "file": "storageinsights_v1_generated_storage_insights_list_report_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_ListReportConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_list_report_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient", + "shortName": "StorageInsightsAsyncClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient.list_report_details", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.ListReportDetails", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "ListReportDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.ListReportDetailsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportDetailsAsyncPager", + "shortName": "list_report_details" + }, + "description": "Sample for ListReportDetails", + "file": "storageinsights_v1_generated_storage_insights_list_report_details_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_ListReportDetails_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_list_report_details_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient", + "shortName": "StorageInsightsClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient.list_report_details", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.ListReportDetails", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "ListReportDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.ListReportDetailsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.services.storage_insights.pagers.ListReportDetailsPager", + "shortName": "list_report_details" + }, + "description": "Sample for ListReportDetails", + "file": "storageinsights_v1_generated_storage_insights_list_report_details_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_ListReportDetails_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_list_report_details_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient", + "shortName": "StorageInsightsAsyncClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsAsyncClient.update_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.UpdateReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "UpdateReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.UpdateReportConfigRequest" + }, + { + "name": "report_config", + "type": "google.cloud.storageinsights_v1.types.ReportConfig" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.types.ReportConfig", + "shortName": "update_report_config" + }, + "description": "Sample for UpdateReportConfig", + "file": "storageinsights_v1_generated_storage_insights_update_report_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_UpdateReportConfig_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_update_report_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient", + "shortName": "StorageInsightsClient" + }, + "fullName": "google.cloud.storageinsights_v1.StorageInsightsClient.update_report_config", + "method": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights.UpdateReportConfig", + "service": { + "fullName": "google.cloud.storageinsights.v1.StorageInsights", + "shortName": "StorageInsights" + }, + "shortName": "UpdateReportConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storageinsights_v1.types.UpdateReportConfigRequest" + }, + { + "name": "report_config", + "type": "google.cloud.storageinsights_v1.types.ReportConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.storageinsights_v1.types.ReportConfig", + "shortName": "update_report_config" + }, + "description": "Sample for UpdateReportConfig", + "file": "storageinsights_v1_generated_storage_insights_update_report_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storageinsights_v1_generated_StorageInsights_UpdateReportConfig_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storageinsights_v1_generated_storage_insights_update_report_config_sync.py" + } + ] +} diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_create_report_config_async.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_create_report_config_async.py new file mode 100644 index 000000000000..cdadebb98b51 --- /dev/null +++ 
b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_create_report_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_CreateReportConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +async def sample_create_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.CreateReportConfigRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_report_config(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_CreateReportConfig_async] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_create_report_config_sync.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_create_report_config_sync.py new file mode 100644 index 000000000000..b400ade86c93 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_create_report_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_CreateReportConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +def sample_create_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.CreateReportConfigRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_report_config(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_CreateReportConfig_sync] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_delete_report_config_async.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_delete_report_config_async.py new file mode 100644 index 000000000000..e70b7fd07b28 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_delete_report_config_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_DeleteReportConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +async def sample_delete_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.DeleteReportConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_report_config(request=request) + + +# [END storageinsights_v1_generated_StorageInsights_DeleteReportConfig_async] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_delete_report_config_sync.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_delete_report_config_sync.py new file mode 100644 index 000000000000..39927671b8b0 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_delete_report_config_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_DeleteReportConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +def sample_delete_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.DeleteReportConfigRequest( + name="name_value", + ) + + # Make the request + client.delete_report_config(request=request) + + +# [END storageinsights_v1_generated_StorageInsights_DeleteReportConfig_sync] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_config_async.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_config_async.py new file mode 100644 index 000000000000..a9d6bfaef193 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_GetReportConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +async def sample_get_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_report_config(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_GetReportConfig_async] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_config_sync.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_config_sync.py new file mode 100644 index 000000000000..a58148d99ddc --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_GetReportConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +def sample_get_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_report_config(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_GetReportConfig_sync] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_detail_async.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_detail_async.py new file mode 100644 index 000000000000..68daccded9eb --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_detail_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetReportDetail +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_GetReportDetail_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +async def sample_get_report_detail(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportDetailRequest( + name="name_value", + ) + + # Make the request + response = await client.get_report_detail(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_GetReportDetail_async] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_detail_sync.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_detail_sync.py new file mode 100644 index 000000000000..821187371f98 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_get_report_detail_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetReportDetail +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_GetReportDetail_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +def sample_get_report_detail(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.GetReportDetailRequest( + name="name_value", + ) + + # Make the request + response = client.get_report_detail(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_GetReportDetail_sync] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_configs_async.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_configs_async.py new file mode 100644 index 000000000000..806274d63ec1 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListReportConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_ListReportConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +async def sample_list_report_configs(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_report_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END storageinsights_v1_generated_StorageInsights_ListReportConfigs_async] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_configs_sync.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_configs_sync.py new file mode 100644 index 000000000000..dcd3a7a77384 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListReportConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_ListReportConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +def sample_list_report_configs(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END storageinsights_v1_generated_StorageInsights_ListReportConfigs_sync] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_details_async.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_details_async.py new file mode 100644 index 000000000000..cd06b1ef5706 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_details_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListReportDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_ListReportDetails_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
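A hedged variant of the ListReportConfigs sample above: the pager can also be consumed one RPC page at a time, and list_report_configs accepts a flattened parent argument in place of a request object (the flattened signature and the report_configs response field are assumed from the standard GAPIC layout; the parent value is a placeholder):

from google.cloud import storageinsights_v1


def sample_list_report_configs_by_page():
    # Create a client, exactly as in the generated sample.
    client = storageinsights_v1.StorageInsightsClient()

    # Assumed flattened signature: pass the parent directly.
    page_result = client.list_report_configs(
        parent="projects/my-project/locations/us-central1",  # placeholder
    )

    # pager.pages yields one ListReportConfigsResponse per underlying RPC,
    # which also exposes next_page_token and unreachable.
    for page in page_result.pages:
        for report_config in page.report_configs:
            print(report_config.name)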
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +async def sample_list_report_details(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportDetailsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_report_details(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END storageinsights_v1_generated_StorageInsights_ListReportDetails_async] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_details_sync.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_details_sync.py new file mode 100644 index 000000000000..fd295979c7d9 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_list_report_details_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListReportDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_ListReportDetails_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +def sample_list_report_details(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.ListReportDetailsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_report_details(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END storageinsights_v1_generated_StorageInsights_ListReportDetails_sync] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_update_report_config_async.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_update_report_config_async.py new file mode 100644 index 000000000000..f8ade92062a7 --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_update_report_config_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_UpdateReportConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +async def sample_update_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsAsyncClient() + + # Initialize request argument(s) + request = storageinsights_v1.UpdateReportConfigRequest( + ) + + # Make the request + response = await client.update_report_config(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_UpdateReportConfig_async] diff --git a/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_update_report_config_sync.py b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_update_report_config_sync.py new file mode 100644 index 000000000000..6994e82a06aa --- /dev/null +++ b/packages/google-cloud-storageinsights/samples/generated_samples/storageinsights_v1_generated_storage_insights_update_report_config_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateReportConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storageinsights + + +# [START storageinsights_v1_generated_StorageInsights_UpdateReportConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storageinsights_v1 + + +def sample_update_report_config(): + # Create a client + client = storageinsights_v1.StorageInsightsClient() + + # Initialize request argument(s) + request = storageinsights_v1.UpdateReportConfigRequest( + ) + + # Make the request + response = client.update_report_config(request=request) + + # Handle the response + print(response) + +# [END storageinsights_v1_generated_StorageInsights_UpdateReportConfig_sync] diff --git a/packages/google-cloud-storageinsights/scripts/decrypt-secrets.sh b/packages/google-cloud-storageinsights/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-storageinsights/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. 
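The two UpdateReportConfig samples above send an empty request; per the fixup script later in this diff, the RPC actually takes update_mask, report_config, and request_id. A hedged sketch of a populated call; the resource name is a placeholder and "display_name" is only an assumed example mask path, not verified against the ReportConfig schema:

from google.cloud import storageinsights_v1
from google.protobuf import field_mask_pb2


def sample_update_report_config_with_mask():
    client = storageinsights_v1.StorageInsightsClient()

    # Placeholder resource name; set the fields you want to change as well.
    report_config = storageinsights_v1.ReportConfig(
        name="projects/my-project/locations/us-central1/reportConfigs/my-config",
    )
    request = storageinsights_v1.UpdateReportConfigRequest(
        report_config=report_config,
        # Only fields named in the mask are modified.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    response = client.update_report_config(request=request)
    print(response)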
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd "$ROOT" + +# Prevent this script from overwriting existing files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are expected to prepare these files themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ --project="${PROJECT_ID}" \ > testing/test-env.sh +gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ --project="${PROJECT_ID}" \ > testing/service-account.json +gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ --project="${PROJECT_ID}" \ > testing/client-secrets.json diff --git a/packages/google-cloud-storageinsights/scripts/fixup_storageinsights_v1_keywords.py b/packages/google-cloud-storageinsights/scripts/fixup_storageinsights_v1_keywords.py new file mode 100644 index 000000000000..5b865f88ac4f --- /dev/null +++ b/packages/google-cloud-storageinsights/scripts/fixup_storageinsights_v1_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
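The three gcloud fetches in decrypt-secrets.sh above map directly onto the Secret Manager client library; a rough Python equivalent of the first one, assuming google-cloud-secret-manager is installed (it is not a dependency declared anywhere in this diff):

import os

from google.cloud import secretmanager


def fetch_test_env() -> None:
    # Same default project as the shell script.
    project_id = os.environ.get(
        "SECRET_MANAGER_PROJECT", "cloud-devrel-kokoro-resources"
    )
    client = secretmanager.SecretManagerServiceClient()

    # "latest" mirrors `gcloud secrets versions access latest`.
    name = f"projects/{project_id}/secrets/python-docs-samples-test-env/versions/latest"
    response = client.access_secret_version(request={"name": name})

    with open("testing/test-env.sh", "wb") as f:
        f.write(response.payload.data)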
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class storageinsightsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_report_config': ('parent', 'report_config', 'request_id', ), + 'delete_report_config': ('name', 'force', 'request_id', ), + 'get_report_config': ('name', ), + 'get_report_detail': ('name', ), + 'list_report_configs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_report_details': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_report_config': ('update_mask', 'report_config', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=storageinsightsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
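+ # Given METHOD_TO_PARAMS above, the written output differs from the input
+ # only at call sites: a positional call such as
+ # client.get_report_config("projects/p/locations/l/reportConfigs/c")
+ # becomes the request-keyword form
+ # client.get_report_config(request={'name': 'projects/p/locations/l/reportConfigs/c'}),
+ # while retry/timeout/metadata arguments survive as ordinary keywords.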
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the storageinsights client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool makes a best-effort attempt to convert positional + parameters in client method calls to keyword-based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool may also produce false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-storageinsights/setup.py b/packages/google-cloud-storageinsights/setup.py new file mode 100644 index 000000000000..6d49812e862d --- /dev/null +++ b/packages/google-cloud-storageinsights/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
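The argparse block above is a thin wrapper around fix_files; driven programmatically, the fixup script reduces to the sketch below. The directory names are placeholders, and the import assumes the scripts directory is on sys.path:

import pathlib

# Hypothetical import; valid once scripts/ is on sys.path.
from fixup_storageinsights_v1_keywords import fix_files

in_dir = pathlib.Path("my_old_sources")   # placeholder: must already exist
out_dir = pathlib.Path("my_new_sources")  # placeholder: must exist and be empty
out_dir.mkdir(parents=True, exist_ok=True)

# Copies every .py file across, un-flattening client calls along the way.
fix_files(in_dir, out_dir)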
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-storageinsights" + + +description = "Google Cloud Storageinsights API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/storageinsights/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-storageinsights/testing/.gitignore b/packages/google-cloud-storageinsights/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-storageinsights/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-storageinsights/testing/constraints-3.10.txt b/packages/google-cloud-storageinsights/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-storageinsights/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-storageinsights/testing/constraints-3.11.txt b/packages/google-cloud-storageinsights/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-storageinsights/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
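One detail of setup.py above worth spelling out: the Trove release classifier is derived from nothing more than the first character of the version string. The same rule, isolated as a sketch:

def release_status(version: str) -> str:
    # Matches setup.py: any 0.x version ships as Beta, everything else
    # as Production/Stable.
    if version[0] == "0":
        return "Development Status :: 4 - Beta"
    return "Development Status :: 5 - Production/Stable"


assert release_status("0.1.0") == "Development Status :: 4 - Beta"
assert release_status("1.0.1") == "Development Status :: 5 - Production/Stable"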
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-storageinsights/testing/constraints-3.12.txt b/packages/google-cloud-storageinsights/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-storageinsights/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-storageinsights/testing/constraints-3.7.txt b/packages/google-cloud-storageinsights/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-storageinsights/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-storageinsights/testing/constraints-3.8.txt b/packages/google-cloud-storageinsights/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-storageinsights/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-storageinsights/testing/constraints-3.9.txt b/packages/google-cloud-storageinsights/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-storageinsights/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-storageinsights/tests/__init__.py b/packages/google-cloud-storageinsights/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-storageinsights/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-storageinsights/tests/unit/__init__.py b/packages/google-cloud-storageinsights/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-storageinsights/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-storageinsights/tests/unit/gapic/__init__.py b/packages/google-cloud-storageinsights/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-storageinsights/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/__init__.py b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py new file mode 100644 index 000000000000..3130490a5c56 --- /dev/null +++ b/packages/google-cloud-storageinsights/tests/unit/gapic/storageinsights_v1/test_storage_insights.py @@ -0,0 +1,7157 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.storageinsights_v1.services.storage_insights import ( + StorageInsightsAsyncClient, + StorageInsightsClient, + pagers, + transports, +) +from google.cloud.storageinsights_v1.types import storageinsights + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
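+# For reference, the mapping exercised by test__get_default_mtls_endpoint below:
+# example.googleapis.com -> example.mtls.googleapis.com,
+# example.sandbox.googleapis.com -> example.mtls.sandbox.googleapis.com, and
+# non-googleapis hosts such as api.example.com are returned unchanged.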
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert StorageInsightsClient._get_default_mtls_endpoint(None) is None + assert ( + StorageInsightsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + StorageInsightsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + StorageInsightsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + StorageInsightsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + StorageInsightsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StorageInsightsClient, "grpc"), + (StorageInsightsAsyncClient, "grpc_asyncio"), + (StorageInsightsClient, "rest"), + ], +) +def test_storage_insights_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "storageinsights.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://storageinsights.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.StorageInsightsGrpcTransport, "grpc"), + (transports.StorageInsightsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.StorageInsightsRestTransport, "rest"), + ], +) +def test_storage_insights_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StorageInsightsClient, "grpc"), + (StorageInsightsAsyncClient, "grpc_asyncio"), + (StorageInsightsClient, "rest"), + ], +) +def test_storage_insights_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "storageinsights.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://storageinsights.googleapis.com" + ) + + +def test_storage_insights_client_get_transport_class(): + transport = StorageInsightsClient.get_transport_class() + available_transports = [ + transports.StorageInsightsGrpcTransport, + transports.StorageInsightsRestTransport, + ] + assert transport in available_transports + + transport = StorageInsightsClient.get_transport_class("grpc") + assert transport == transports.StorageInsightsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StorageInsightsClient, transports.StorageInsightsGrpcTransport, "grpc"), + ( + StorageInsightsAsyncClient, + transports.StorageInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (StorageInsightsClient, transports.StorageInsightsRestTransport, "rest"), + ], +) +@mock.patch.object( + StorageInsightsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StorageInsightsClient), +) +@mock.patch.object( + StorageInsightsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StorageInsightsAsyncClient), +) +def test_storage_insights_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(StorageInsightsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(StorageInsightsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + StorageInsightsClient, + transports.StorageInsightsGrpcTransport, + "grpc", + "true", + ), + ( + StorageInsightsAsyncClient, + transports.StorageInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + StorageInsightsClient, + transports.StorageInsightsGrpcTransport, + "grpc", + "false", + ), + ( + StorageInsightsAsyncClient, + transports.StorageInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + StorageInsightsClient, + transports.StorageInsightsRestTransport, + "rest", + "true", + ), + ( + StorageInsightsClient, + transports.StorageInsightsRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + StorageInsightsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StorageInsightsClient), +) +@mock.patch.object( + StorageInsightsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StorageInsightsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_storage_insights_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [StorageInsightsClient, StorageInsightsAsyncClient] +) +@mock.patch.object( + StorageInsightsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StorageInsightsClient), +) +@mock.patch.object( + StorageInsightsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StorageInsightsAsyncClient), +) +def test_storage_insights_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StorageInsightsClient, transports.StorageInsightsGrpcTransport, "grpc"), + ( + StorageInsightsAsyncClient, + transports.StorageInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (StorageInsightsClient, transports.StorageInsightsRestTransport, "rest"), + ], +) +def test_storage_insights_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + StorageInsightsClient, + transports.StorageInsightsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + StorageInsightsAsyncClient, + transports.StorageInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (StorageInsightsClient, transports.StorageInsightsRestTransport, "rest", None), + ], +) +def test_storage_insights_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_storage_insights_client_client_options_from_dict(): + with mock.patch( + "google.cloud.storageinsights_v1.services.storage_insights.transports.StorageInsightsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = StorageInsightsClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + StorageInsightsClient, + transports.StorageInsightsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + StorageInsightsAsyncClient, + transports.StorageInsightsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_storage_insights_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "storageinsights.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="storageinsights.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.ListReportConfigsRequest, + dict, + ], +) +def test_list_report_configs(request_type, transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ListReportConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_report_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.ListReportConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_report_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + client.list_report_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.ListReportConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_report_configs_async( + transport: str = "grpc_asyncio", + request_type=storageinsights.ListReportConfigsRequest, +): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ListReportConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_report_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.ListReportConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_report_configs_async_from_dict(): + await test_list_report_configs_async(request_type=dict) + + +def test_list_report_configs_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.ListReportConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + call.return_value = storageinsights.ListReportConfigsResponse() + client.list_report_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_report_configs_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.ListReportConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ListReportConfigsResponse() + ) + await client.list_report_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_report_configs_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ListReportConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_report_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_report_configs_flattened_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_configs( + storageinsights.ListReportConfigsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_report_configs_flattened_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ListReportConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_report_configs_flattened_error_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_report_configs( + storageinsights.ListReportConfigsRequest(), + parent="parent_value", + ) + + +def test_list_report_configs_pager(transport_name: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + # Set the response to a series of pages.
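+ # Each element of side_effect is consumed by one RPC in turn; the trailing + # RuntimeError guards against the pager fetching more pages than scripted.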
+ call.side_effect = ( + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + next_page_token="abc", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[], + next_page_token="def", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + ], + next_page_token="ghi", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_report_configs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, storageinsights.ReportConfig) for i in results) + + +def test_list_report_configs_pages(transport_name: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + next_page_token="abc", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[], + next_page_token="def", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + ], + next_page_token="ghi", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_report_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_report_configs_async_pager(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages.
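+ # Same scripted pages as the sync test; mock.AsyncMock returns them as + # awaitables so the pager can be consumed with `async for`.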
+ call.side_effect = ( + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + next_page_token="abc", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[], + next_page_token="def", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + ], + next_page_token="ghi", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_report_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, storageinsights.ReportConfig) for i in responses) + + +@pytest.mark.asyncio +async def test_list_report_configs_async_pages(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + next_page_token="abc", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[], + next_page_token="def", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + ], + next_page_token="ghi", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_report_configs(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.GetReportConfigRequest, + dict, + ], +) +def test_get_report_config(request_type, transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + csv_options=storageinsights.CSVOptions( + record_separator="record_separator_value" + ), + object_metadata_report_options=storageinsights.ObjectMetadataReportOptions( + metadata_fields=["metadata_fields_value"] + ), + ) + response = client.get_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.GetReportConfigRequest() + + # Establish that the response is the type that we expect.
+ assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_report_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_config), "__call__" + ) as call: + client.get_report_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.GetReportConfigRequest() + + +@pytest.mark.asyncio +async def test_get_report_config_async( + transport: str = "grpc_asyncio", request_type=storageinsights.GetReportConfigRequest +): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.GetReportConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_report_config_async_from_dict(): + await test_get_report_config_async(request_type=dict) + + +def test_get_report_config_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.GetReportConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_config), "__call__" + ) as call: + call.return_value = storageinsights.ReportConfig() + client.get_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_report_config_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
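+ # The routing header derived from request.name is expected to show up in the + # call metadata as ("x-goog-request-params", "name=name_value").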
+ request = storageinsights.GetReportConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig() + ) + await client.get_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_report_config_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ReportConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_report_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_report_config_flattened_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_report_config( + storageinsights.GetReportConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_report_config_flattened_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_report_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_report_config_flattened_error_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
+ with pytest.raises(ValueError): + await client.get_report_config( + storageinsights.GetReportConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.CreateReportConfigRequest, + dict, + ], +) +def test_create_report_config(request_type, transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + csv_options=storageinsights.CSVOptions( + record_separator="record_separator_value" + ), + object_metadata_report_options=storageinsights.ObjectMetadataReportOptions( + metadata_fields=["metadata_fields_value"] + ), + ) + response = client.create_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.CreateReportConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_report_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_config), "__call__" + ) as call: + client.create_report_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.CreateReportConfigRequest() + + +@pytest.mark.asyncio +async def test_create_report_config_async( + transport: str = "grpc_asyncio", + request_type=storageinsights.CreateReportConfigRequest, +): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.create_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.CreateReportConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_create_report_config_async_from_dict(): + await test_create_report_config_async(request_type=dict) + + +def test_create_report_config_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.CreateReportConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_config), "__call__" + ) as call: + call.return_value = storageinsights.ReportConfig() + client.create_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_report_config_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.CreateReportConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig() + ) + await client.create_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_report_config_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ReportConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_report_config( + parent="parent_value", + report_config=storageinsights.ReportConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_config + mock_val = storageinsights.ReportConfig(name="name_value") + assert arg == mock_val + + +def test_create_report_config_flattened_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_report_config( + storageinsights.CreateReportConfigRequest(), + parent="parent_value", + report_config=storageinsights.ReportConfig(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_report_config_flattened_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_report_config( + parent="parent_value", + report_config=storageinsights.ReportConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_config + mock_val = storageinsights.ReportConfig(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_report_config_flattened_error_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_report_config( + storageinsights.CreateReportConfigRequest(), + parent="parent_value", + report_config=storageinsights.ReportConfig(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.UpdateReportConfigRequest, + dict, + ], +) +def test_update_report_config(request_type, transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + csv_options=storageinsights.CSVOptions( + record_separator="record_separator_value" + ), + object_metadata_report_options=storageinsights.ObjectMetadataReportOptions( + metadata_fields=["metadata_fields_value"] + ), + ) + response = client.update_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.UpdateReportConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_report_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work.
+ client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_report_config), "__call__" + ) as call: + client.update_report_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.UpdateReportConfigRequest() + + +@pytest.mark.asyncio +async def test_update_report_config_async( + transport: str = "grpc_asyncio", + request_type=storageinsights.UpdateReportConfigRequest, +): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.UpdateReportConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_update_report_config_async_from_dict(): + await test_update_report_config_async(request_type=dict) + + +def test_update_report_config_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.UpdateReportConfigRequest() + + request.report_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_report_config), "__call__" + ) as call: + call.return_value = storageinsights.ReportConfig() + client.update_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "report_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_report_config_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.UpdateReportConfigRequest() + + request.report_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_report_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig() + ) + await client.update_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "report_config.name=name_value", + ) in kw["metadata"] + + +def test_update_report_config_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ReportConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_report_config( + report_config=storageinsights.ReportConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].report_config + mock_val = storageinsights.ReportConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_report_config_flattened_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_report_config( + storageinsights.UpdateReportConfigRequest(), + report_config=storageinsights.ReportConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_report_config_flattened_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_report_config( + report_config=storageinsights.ReportConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].report_config + mock_val = storageinsights.ReportConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_report_config_flattened_error_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_report_config( + storageinsights.UpdateReportConfigRequest(), + report_config=storageinsights.ReportConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.DeleteReportConfigRequest, + dict, + ], +) +def test_delete_report_config(request_type, transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.DeleteReportConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_report_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_report_config), "__call__" + ) as call: + client.delete_report_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.DeleteReportConfigRequest() + + +@pytest.mark.asyncio +async def test_delete_report_config_async( + transport: str = "grpc_asyncio", + request_type=storageinsights.DeleteReportConfigRequest, +): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_report_config(request) + + # Establish that the underlying gRPC stub method was called. 
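+ # (The async variants assert only that at least one call was recorded.)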
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.DeleteReportConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_report_config_async_from_dict(): + await test_delete_report_config_async(request_type=dict) + + +def test_delete_report_config_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.DeleteReportConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_report_config), "__call__" + ) as call: + call.return_value = None + client.delete_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_report_config_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.DeleteReportConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_report_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_report_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_report_config_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_report_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_report_config_flattened_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_report_config( + storageinsights.DeleteReportConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_report_config_flattened_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_report_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_report_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_report_config_flattened_error_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_report_config( + storageinsights.DeleteReportConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.ListReportDetailsRequest, + dict, + ], +) +def test_list_report_details(request_type, transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ListReportDetailsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_report_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.ListReportDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportDetailsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_report_details_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + client.list_report_details() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.ListReportDetailsRequest() + + +@pytest.mark.asyncio +async def test_list_report_details_async( + transport: str = "grpc_asyncio", + request_type=storageinsights.ListReportDetailsRequest, +): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ListReportDetailsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_report_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.ListReportDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportDetailsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_report_details_async_from_dict(): + await test_list_report_details_async(request_type=dict) + + +def test_list_report_details_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.ListReportDetailsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + call.return_value = storageinsights.ListReportDetailsResponse() + client.list_report_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_report_details_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.ListReportDetailsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ListReportDetailsResponse() + ) + await client.list_report_details(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_report_details_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = storageinsights.ListReportDetailsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_report_details( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_report_details_flattened_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_details( + storageinsights.ListReportDetailsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_report_details_flattened_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ListReportDetailsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_details( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_report_details_flattened_error_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_report_details( + storageinsights.ListReportDetailsRequest(), + parent="parent_value", + ) + + +def test_list_report_details_pager(transport_name: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_details), "__call__" + ) as call: + # Set the response to a series of pages.
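+ # Four pages carrying 3 + 0 + 1 + 2 = 6 ReportDetail messages in total, + # matching the len(results) == 6 assertion below.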
+        call.side_effect = (
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="abc",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[],
+                next_page_token="def",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="ghi",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_report_details(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, storageinsights.ReportDetail) for i in results)
+
+
+def test_list_report_details_pages(transport_name: str = "grpc"):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_report_details), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="abc",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[],
+                next_page_token="def",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="ghi",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_report_details(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_report_details_async_pager():
+    client = StorageInsightsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_report_details),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
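+        # __call__ was patched with new_callable=mock.AsyncMock above, so each
+        # staged page is returned from an awaitable call, matching how the
+        # async client awaits every page fetch.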
+        call.side_effect = (
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="abc",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[],
+                next_page_token="def",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="ghi",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_report_details(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, storageinsights.ReportDetail) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_report_details_async_pages():
+    client = StorageInsightsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_report_details),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="abc",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[],
+                next_page_token="def",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                ],
+                next_page_token="ghi",
+            ),
+            storageinsights.ListReportDetailsResponse(
+                report_details=[
+                    storageinsights.ReportDetail(),
+                    storageinsights.ReportDetail(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_report_details(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        storageinsights.GetReportDetailRequest,
+        dict,
+    ],
+)
+def test_get_report_detail(request_type, transport: str = "grpc"):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_report_detail), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storageinsights.ReportDetail(
+            name="name_value",
+            report_path_prefix="report_path_prefix_value",
+            shards_count=1293,
+        )
+        response = client.get_report_detail(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == storageinsights.GetReportDetailRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, storageinsights.ReportDetail) + assert response.name == "name_value" + assert response.report_path_prefix == "report_path_prefix_value" + assert response.shards_count == 1293 + + +def test_get_report_detail_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_detail), "__call__" + ) as call: + client.get_report_detail() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.GetReportDetailRequest() + + +@pytest.mark.asyncio +async def test_get_report_detail_async( + transport: str = "grpc_asyncio", request_type=storageinsights.GetReportDetailRequest +): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_detail), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + storageinsights.ReportDetail( + name="name_value", + report_path_prefix="report_path_prefix_value", + shards_count=1293, + ) + ) + response = await client.get_report_detail(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == storageinsights.GetReportDetailRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, storageinsights.ReportDetail) + assert response.name == "name_value" + assert response.report_path_prefix == "report_path_prefix_value" + assert response.shards_count == 1293 + + +@pytest.mark.asyncio +async def test_get_report_detail_async_from_dict(): + await test_get_report_detail_async(request_type=dict) + + +def test_get_report_detail_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storageinsights.GetReportDetailRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_report_detail), "__call__" + ) as call: + call.return_value = storageinsights.ReportDetail() + client.get_report_detail(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_report_detail_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
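+    # The client mirrors request.name into the x-goog-request-params metadata
+    # entry (asserted below as ("x-goog-request-params", "name=name_value"))
+    # so the backend can route the call without inspecting the request body.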
+    request = storageinsights.GetReportDetailRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_report_detail), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            storageinsights.ReportDetail()
+        )
+        await client.get_report_detail(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_report_detail_flattened():
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_report_detail), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storageinsights.ReportDetail()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_report_detail(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_report_detail_flattened_error():
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_report_detail(
+            storageinsights.GetReportDetailRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_report_detail_flattened_async():
+    client = StorageInsightsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_report_detail), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call; the async
+        # client expects an awaitable, so wrap the response in a fake call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            storageinsights.ReportDetail()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_report_detail(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_report_detail_flattened_error_async():
+    client = StorageInsightsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_report_detail( + storageinsights.GetReportDetailRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.ListReportConfigsRequest, + dict, + ], +) +def test_list_report_configs_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ListReportConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ListReportConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_report_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_report_configs_rest_required_fields( + request_type=storageinsights.ListReportConfigsRequest, +): + transport_class = transports.StorageInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_report_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_report_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storageinsights.ListReportConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
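+        # path_template.transcode() normally maps the proto request onto an
+        # HTTP method, URI, body and query string according to the service's
+        # google.api.http annotations; faking its result below keeps this test
+        # focused on required-field handling rather than URI construction.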
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = storageinsights.ListReportConfigsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_report_configs(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_report_configs_rest_unset_required_fields():
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_report_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "orderBy",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_report_configs_rest_interceptors(null_interceptor):
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.StorageInsightsRestInterceptor(),
+    )
+    client = StorageInsightsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "post_list_report_configs"
+    ) as post, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "pre_list_report_configs"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = storageinsights.ListReportConfigsRequest.pb(
+            storageinsights.ListReportConfigsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = storageinsights.ListReportConfigsResponse.to_json(
+            storageinsights.ListReportConfigsResponse()
+        )
+
+        request = storageinsights.ListReportConfigsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = storageinsights.ListReportConfigsResponse()
+
+        client.list_report_configs(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_report_configs_rest_bad_request(
+    transport: str = "rest", request_type=storageinsights.ListReportConfigsRequest
+):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
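+    # api_core translates an HTTP 400 response into
+    # core_exceptions.BadRequest, which pytest.raises() captures below.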
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_report_configs(request) + + +def test_list_report_configs_rest_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ListReportConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ListReportConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_report_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/reportConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_report_configs_rest_flattened_error(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_configs( + storageinsights.ListReportConfigsRequest(), + parent="parent_value", + ) + + +def test_list_report_configs_rest_pager(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + next_page_token="abc", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[], + next_page_token="def", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + ], + next_page_token="ghi", + ), + storageinsights.ListReportConfigsResponse( + report_configs=[ + storageinsights.ReportConfig(), + storageinsights.ReportConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + storageinsights.ListReportConfigsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_report_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, storageinsights.ReportConfig) for i in results) + + pages = list(client.list_report_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.GetReportConfigRequest, + dict, + ], +) +def test_get_report_config_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reportConfigs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + csv_options=storageinsights.CSVOptions( + record_separator="record_separator_value" + ), + object_metadata_report_options=storageinsights.ObjectMetadataReportOptions( + metadata_fields=["metadata_fields_value"] + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_report_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_report_config_rest_required_fields( + request_type=storageinsights.GetReportConfigRequest, +): + transport_class = transports.StorageInsightsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
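+            # The generated REST transport appends the system query parameter
+            # $alt=json;enum-encoding=int to every request; with all request
+            # fields left at their defaults it is the only parameter expected
+            # on the wire at the end of this test.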
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = storageinsights.ReportConfig.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_report_config(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_report_config_rest_unset_required_fields():
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_report_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_report_config_rest_interceptors(null_interceptor):
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.StorageInsightsRestInterceptor(),
+    )
+    client = StorageInsightsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "post_get_report_config"
+    ) as post, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "pre_get_report_config"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = storageinsights.GetReportConfigRequest.pb(
+            storageinsights.GetReportConfigRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = storageinsights.ReportConfig.to_json(
+            storageinsights.ReportConfig()
+        )
+
+        request = storageinsights.GetReportConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = storageinsights.ReportConfig()
+
+        client.get_report_config(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_report_config_rest_bad_request(
+    transport: str = "rest", request_type=storageinsights.GetReportConfigRequest
+):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/reportConfigs/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_report_config(request) + + +def test_get_report_config_rest_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_report_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reportConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_report_config_rest_flattened_error(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_report_config( + storageinsights.GetReportConfigRequest(), + name="name_value", + ) + + +def test_get_report_config_rest_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.CreateReportConfigRequest, + dict, + ], +) +def test_create_report_config_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["report_config"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "frequency_options": { + "frequency": 1, + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "csv_options": { + "record_separator": "record_separator_value", + "delimiter": "delimiter_value", + "header_required": True, + }, + "parquet_options": {}, + "object_metadata_report_options": { + "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], + "storage_filters": {"bucket": "bucket_value"}, + "storage_destination_options": { + "bucket": "bucket_value", + "destination_path": "destination_path_value", + }, + }, + "labels": {}, + "display_name": "display_name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
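+    # The canned response below is built by converting the proto-plus message
+    # to its raw protobuf form with .pb() and serializing that with
+    # json_format.MessageToJson, mimicking the JSON payload the REST endpoint
+    # would return.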
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + csv_options=storageinsights.CSVOptions( + record_separator="record_separator_value" + ), + object_metadata_report_options=storageinsights.ObjectMetadataReportOptions( + metadata_fields=["metadata_fields_value"] + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_report_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_report_config_rest_required_fields( + request_type=storageinsights.CreateReportConfigRequest, +): + transport_class = transports.StorageInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_report_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_report_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
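+            # Unlike the GET variants above, this fake transcode result also
+            # carries the request as the HTTP body (attached just below),
+            # matching CreateReportConfig's POST mapping.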
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_report_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_report_config_rest_unset_required_fields(): + transport = transports.StorageInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_report_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "parent", + "reportConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_report_config_rest_interceptors(null_interceptor): + transport = transports.StorageInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StorageInsightsRestInterceptor(), + ) + client = StorageInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StorageInsightsRestInterceptor, "post_create_report_config" + ) as post, mock.patch.object( + transports.StorageInsightsRestInterceptor, "pre_create_report_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = storageinsights.CreateReportConfigRequest.pb( + storageinsights.CreateReportConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = storageinsights.ReportConfig.to_json( + storageinsights.ReportConfig() + ) + + request = storageinsights.CreateReportConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = storageinsights.ReportConfig() + + client.create_report_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_report_config_rest_bad_request( + transport: str = "rest", request_type=storageinsights.CreateReportConfigRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["report_config"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "frequency_options": { + "frequency": 1, + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "csv_options": { + "record_separator": "record_separator_value", + "delimiter": "delimiter_value", + "header_required": True, + }, + "parquet_options": {}, + "object_metadata_report_options": 
{ + "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], + "storage_filters": {"bucket": "bucket_value"}, + "storage_destination_options": { + "bucket": "bucket_value", + "destination_path": "destination_path_value", + }, + }, + "labels": {}, + "display_name": "display_name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_report_config(request) + + +def test_create_report_config_rest_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + report_config=storageinsights.ReportConfig(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_report_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/reportConfigs" + % client.transport._host, + args[1], + ) + + +def test_create_report_config_rest_flattened_error(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_report_config( + storageinsights.CreateReportConfigRequest(), + parent="parent_value", + report_config=storageinsights.ReportConfig(name="name_value"), + ) + + +def test_create_report_config_rest_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.UpdateReportConfigRequest, + dict, + ], +) +def test_update_report_config_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "report_config": { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + } + request_init["report_config"] = { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "frequency_options": { + "frequency": 1, + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "csv_options": { + "record_separator": "record_separator_value", + "delimiter": "delimiter_value", + "header_required": True, + }, + "parquet_options": {}, + "object_metadata_report_options": { + "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], + "storage_filters": {"bucket": "bucket_value"}, + "storage_destination_options": { + "bucket": "bucket_value", + "destination_path": "destination_path_value", + }, + }, + "labels": {}, + "display_name": "display_name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig( + name="name_value", + display_name="display_name_value", + csv_options=storageinsights.CSVOptions( + record_separator="record_separator_value" + ), + object_metadata_report_options=storageinsights.ObjectMetadataReportOptions( + metadata_fields=["metadata_fields_value"] + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_report_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, storageinsights.ReportConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_report_config_rest_required_fields( + request_type=storageinsights.UpdateReportConfigRequest, +): + transport_class = transports.StorageInsightsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_report_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_report_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_report_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_report_config_rest_unset_required_fields(): + transport = transports.StorageInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_report_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "reportConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_report_config_rest_interceptors(null_interceptor): + transport = transports.StorageInsightsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StorageInsightsRestInterceptor(), + ) + client = StorageInsightsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StorageInsightsRestInterceptor, "post_update_report_config" + ) as post, mock.patch.object( + transports.StorageInsightsRestInterceptor, "pre_update_report_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = storageinsights.UpdateReportConfigRequest.pb( + storageinsights.UpdateReportConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = storageinsights.ReportConfig.to_json( + storageinsights.ReportConfig() + ) + + request = storageinsights.UpdateReportConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = storageinsights.ReportConfig() + + client.update_report_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_report_config_rest_bad_request( + transport: str = "rest", request_type=storageinsights.UpdateReportConfigRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "report_config": { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + } + request_init["report_config"] = { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "frequency_options": { + "frequency": 1, + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "csv_options": { + "record_separator": 
"record_separator_value", + "delimiter": "delimiter_value", + "header_required": True, + }, + "parquet_options": {}, + "object_metadata_report_options": { + "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], + "storage_filters": {"bucket": "bucket_value"}, + "storage_destination_options": { + "bucket": "bucket_value", + "destination_path": "destination_path_value", + }, + }, + "labels": {}, + "display_name": "display_name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_report_config(request) + + +def test_update_report_config_rest_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "report_config": { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + report_config=storageinsights.ReportConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_report_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{report_config.name=projects/*/locations/*/reportConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_report_config_rest_flattened_error(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_report_config( + storageinsights.UpdateReportConfigRequest(), + report_config=storageinsights.ReportConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_report_config_rest_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.DeleteReportConfigRequest, + dict, + ], +) +def test_delete_report_config_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reportConfigs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_report_config(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_report_config_rest_required_fields( + request_type=storageinsights.DeleteReportConfigRequest, +): + transport_class = transports.StorageInsightsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_report_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_report_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ""
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_report_config(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_report_config_rest_unset_required_fields():
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_report_config._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "force",
+                "requestId",
+            )
+        )
+        & set(("name",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_report_config_rest_interceptors(null_interceptor):
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.StorageInsightsRestInterceptor(),
+    )
+    client = StorageInsightsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "pre_delete_report_config"
+    ) as pre:
+        pre.assert_not_called()
+        pb_message = storageinsights.DeleteReportConfigRequest.pb(
+            storageinsights.DeleteReportConfigRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = storageinsights.DeleteReportConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_report_config(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+
+
+def test_delete_report_config_rest_bad_request(
+    transport: str = "rest", request_type=storageinsights.DeleteReportConfigRequest
+):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/reportConfigs/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_report_config(request)
+
+
+def test_delete_report_config_rest_flattened():
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
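+        # Delete has no response payload (the mocked body below is empty), so
+        # the client is expected to surface None.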
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_report_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reportConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_report_config_rest_flattened_error(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_report_config( + storageinsights.DeleteReportConfigRequest(), + name="name_value", + ) + + +def test_delete_report_config_rest_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.ListReportDetailsRequest, + dict, + ], +) +def test_list_report_details_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ListReportDetailsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ListReportDetailsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_report_details(request) + + # Establish that the response is the type that we expect. 
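+    # (A pager rather than the raw response: iterating it lazily fetches
+    # subsequent pages by following next_page_token.)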
+ assert isinstance(response, pagers.ListReportDetailsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_report_details_rest_required_fields( + request_type=storageinsights.ListReportDetailsRequest, +): + transport_class = transports.StorageInsightsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_report_details._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_report_details._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storageinsights.ListReportDetailsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = storageinsights.ListReportDetailsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_report_details(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_report_details_rest_unset_required_fields():
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_report_details._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "orderBy",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_report_details_rest_interceptors(null_interceptor):
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.StorageInsightsRestInterceptor(),
+    )
+    client = StorageInsightsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "post_list_report_details"
+    ) as post, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "pre_list_report_details"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = storageinsights.ListReportDetailsRequest.pb(
+            storageinsights.ListReportDetailsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = storageinsights.ListReportDetailsResponse.to_json(
+            storageinsights.ListReportDetailsResponse()
+        )
+
+        request = storageinsights.ListReportDetailsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = storageinsights.ListReportDetailsResponse()
+
+        client.list_report_details(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_report_details_rest_bad_request(
+    transport: str = "rest", request_type=storageinsights.ListReportDetailsRequest
+):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/reportConfigs/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
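+    # google.api_core translates an HTTP 400 status into
+    # core_exceptions.BadRequest, which the raises() context below expects.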
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_report_details(request) + + +def test_list_report_details_rest_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ListReportDetailsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ListReportDetailsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_report_details(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/reportConfigs/*}/reportDetails" + % client.transport._host, + args[1], + ) + + +def test_list_report_details_rest_flattened_error(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_details( + storageinsights.ListReportDetailsRequest(), + parent="parent_value", + ) + + +def test_list_report_details_rest_pager(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + storageinsights.ListReportDetailsResponse( + report_details=[ + storageinsights.ReportDetail(), + storageinsights.ReportDetail(), + storageinsights.ReportDetail(), + ], + next_page_token="abc", + ), + storageinsights.ListReportDetailsResponse( + report_details=[], + next_page_token="def", + ), + storageinsights.ListReportDetailsResponse( + report_details=[ + storageinsights.ReportDetail(), + ], + next_page_token="ghi", + ), + storageinsights.ListReportDetailsResponse( + report_details=[ + storageinsights.ReportDetail(), + storageinsights.ReportDetail(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + storageinsights.ListReportDetailsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/reportConfigs/sample3" + } + + pager = client.list_report_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, storageinsights.ReportDetail) for i in results) + + pages = list(client.list_report_details(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + storageinsights.GetReportDetailRequest, + dict, + ], +) +def test_get_report_detail_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3/reportDetails/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportDetail( + name="name_value", + report_path_prefix="report_path_prefix_value", + shards_count=1293, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportDetail.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_report_detail(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, storageinsights.ReportDetail) + assert response.name == "name_value" + assert response.report_path_prefix == "report_path_prefix_value" + assert response.shards_count == 1293 + + +def test_get_report_detail_rest_required_fields( + request_type=storageinsights.GetReportDetailRequest, +): + transport_class = transports.StorageInsightsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report_detail._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report_detail._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportDetail() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = storageinsights.ReportDetail.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_report_detail(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_report_detail_rest_unset_required_fields():
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_report_detail._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_report_detail_rest_interceptors(null_interceptor):
+    transport = transports.StorageInsightsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.StorageInsightsRestInterceptor(),
+    )
+    client = StorageInsightsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "post_get_report_detail"
+    ) as post, mock.patch.object(
+        transports.StorageInsightsRestInterceptor, "pre_get_report_detail"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = storageinsights.GetReportDetailRequest.pb(
+            storageinsights.GetReportDetailRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = storageinsights.ReportDetail.to_json(
+            storageinsights.ReportDetail()
+        )
+
+        request = storageinsights.GetReportDetailRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = storageinsights.ReportDetail()
+
+        client.get_report_detail(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_report_detail_rest_bad_request(
+    transport: str = "rest", request_type=storageinsights.GetReportDetailRequest
+):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/reportConfigs/sample3/reportDetails/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_report_detail(request) + + +def test_get_report_detail_rest_flattened(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storageinsights.ReportDetail() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reportConfigs/sample3/reportDetails/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = storageinsights.ReportDetail.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_report_detail(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reportConfigs/*/reportDetails/*}" + % client.transport._host, + args[1], + ) + + +def test_get_report_detail_rest_flattened_error(transport: str = "rest"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_report_detail( + storageinsights.GetReportDetailRequest(), + name="name_value", + ) + + +def test_get_report_detail_rest_error(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.StorageInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.StorageInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StorageInsightsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.StorageInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StorageInsightsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StorageInsightsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.StorageInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StorageInsightsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.StorageInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = StorageInsightsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.StorageInsightsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.StorageInsightsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StorageInsightsGrpcTransport, + transports.StorageInsightsGrpcAsyncIOTransport, + transports.StorageInsightsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = StorageInsightsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.StorageInsightsGrpcTransport, + ) + + +def test_storage_insights_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.StorageInsightsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_storage_insights_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.storageinsights_v1.services.storage_insights.transports.StorageInsightsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.StorageInsightsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_report_configs", + "get_report_config", + "create_report_config", + "update_report_config", + "delete_report_config", + "list_report_details", + "get_report_detail", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_storage_insights_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.storageinsights_v1.services.storage_insights.transports.StorageInsightsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StorageInsightsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_storage_insights_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.storageinsights_v1.services.storage_insights.transports.StorageInsightsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StorageInsightsTransport() + adc.assert_called_once() + + +def test_storage_insights_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + StorageInsightsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StorageInsightsGrpcTransport, + transports.StorageInsightsGrpcAsyncIOTransport, + ], +) +def test_storage_insights_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
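+    # google.auth.default is patched below, so no real ADC lookup happens; the
+    # test only verifies that scopes and quota_project_id are forwarded.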
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StorageInsightsGrpcTransport, + transports.StorageInsightsGrpcAsyncIOTransport, + transports.StorageInsightsRestTransport, + ], +) +def test_storage_insights_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.StorageInsightsGrpcTransport, grpc_helpers), + (transports.StorageInsightsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_storage_insights_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "storageinsights.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="storageinsights.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StorageInsightsGrpcTransport, + transports.StorageInsightsGrpcAsyncIOTransport, + ], +) +def test_storage_insights_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
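+    # client_cert_source_callback returns a (cert, key) pair, which the
+    # transport is expected to hand to grpc.ssl_channel_credentials().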
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_storage_insights_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.StorageInsightsRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_storage_insights_host_no_port(transport_name):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="storageinsights.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "storageinsights.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://storageinsights.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_storage_insights_host_with_port(transport_name):
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="storageinsights.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "storageinsights.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://storageinsights.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_storage_insights_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = StorageInsightsClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = StorageInsightsClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_report_configs._session
+    session2 = client2.transport.list_report_configs._session
+    assert session1 != session2
+    session1 = client1.transport.get_report_config._session
+    session2 = client2.transport.get_report_config._session
+    assert session1 != session2
+    session1 = client1.transport.create_report_config._session
+    session2 = client2.transport.create_report_config._session
+    assert session1 != session2
+    session1 = client1.transport.update_report_config._session
+    session2 = client2.transport.update_report_config._session
+    assert session1 != session2
+    session1 = client1.transport.delete_report_config._session
+    session2 = client2.transport.delete_report_config._session
+    assert session1 != session2
+    session1 = client1.transport.list_report_details._session
+    session2 = client2.transport.list_report_details._session
+    assert session1 != session2
+    session1 = client1.transport.get_report_detail._session
+    session2 = client2.transport.get_report_detail._session
+    assert session1 != session2
+
+
+def test_storage_insights_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.StorageInsightsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_storage_insights_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.StorageInsightsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.StorageInsightsGrpcTransport,
+        transports.StorageInsightsGrpcAsyncIOTransport,
+    ],
+)
+def test_storage_insights_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.StorageInsightsGrpcTransport, + transports.StorageInsightsGrpcAsyncIOTransport, + ], +) +def test_storage_insights_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_report_config_path(): + project = "squid" + location = "clam" + report_config = "whelk" + expected = ( + "projects/{project}/locations/{location}/reportConfigs/{report_config}".format( + project=project, + location=location, + report_config=report_config, + ) + ) + actual = StorageInsightsClient.report_config_path(project, location, report_config) + assert expected == actual + + +def test_parse_report_config_path(): + expected = { + "project": "octopus", + "location": "oyster", + "report_config": "nudibranch", + } + path = StorageInsightsClient.report_config_path(**expected) + + # Check that the path construction is reversible. + actual = StorageInsightsClient.parse_report_config_path(path) + assert expected == actual + + +def test_report_detail_path(): + project = "cuttlefish" + location = "mussel" + report_config = "winkle" + report_detail = "nautilus" + expected = "projects/{project}/locations/{location}/reportConfigs/{report_config}/reportDetails/{report_detail}".format( + project=project, + location=location, + report_config=report_config, + report_detail=report_detail, + ) + actual = StorageInsightsClient.report_detail_path( + project, location, report_config, report_detail + ) + assert expected == actual + + +def test_parse_report_detail_path(): + expected = { + "project": "scallop", + "location": "abalone", + "report_config": "squid", + "report_detail": "clam", + } + path = StorageInsightsClient.report_detail_path(**expected) + + # Check that the path construction is reversible. + actual = StorageInsightsClient.parse_report_detail_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = StorageInsightsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = StorageInsightsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = StorageInsightsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = StorageInsightsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = StorageInsightsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = StorageInsightsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = StorageInsightsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = StorageInsightsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = StorageInsightsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = StorageInsightsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = StorageInsightsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = StorageInsightsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = StorageInsightsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = StorageInsightsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = StorageInsightsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.StorageInsightsTransport, "_prep_wrapped_messages" + ) as prep: + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.StorageInsightsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = StorageInsightsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = StorageInsightsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = StorageInsightsClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = StorageInsightsAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = StorageInsightsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = StorageInsightsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = StorageInsightsClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = StorageInsightsClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (StorageInsightsClient, transports.StorageInsightsGrpcTransport), + (StorageInsightsAsyncClient, transports.StorageInsightsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-support/.OwlBot.yaml b/packages/google-cloud-support/.OwlBot.yaml new file mode 100644 index 000000000000..71e383435344 --- /dev/null +++ b/packages/google-cloud-support/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/support/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-support/$1 diff --git a/packages/google-cloud-support/.coveragerc b/packages/google-cloud-support/.coveragerc new file mode 100644 index 000000000000..8a96f9066cf9 --- /dev/null +++ b/packages/google-cloud-support/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/support/__init__.py + google/cloud/support/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-support/.flake8 b/packages/google-cloud-support/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-support/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. 
+ **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-support/.gitignore b/packages/google-cloud-support/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-support/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-support/.repo-metadata.json b/packages/google-cloud-support/.repo-metadata.json new file mode 100644 index 000000000000..4214fe95943c --- /dev/null +++ b/packages/google-cloud-support/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "support", + "name_pretty": "Google Cloud Support API", + "api_description": "Manages Google Cloud technical support cases for Customer Care support offerings.", + "product_documentation": "https://cloud.google.com/support/docs/reference/support-api", + "client_documentation": "https://cloud.google.com/python/docs/reference/support/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-support", + "api_id": "support.googleapis.com", + "default_version": "v2", + "codeowner_team": "", + "api_shortname": "support" +} diff --git a/packages/google-cloud-support/CHANGELOG.md b/packages/google-cloud-support/CHANGELOG.md new file mode 100644 index 000000000000..1e85c3397732 --- /dev/null +++ b/packages/google-cloud-support/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2023-05-12) + + +### Features + +* add initial files for google.cloud.support.v2 ([#11176](https://github.com/googleapis/google-cloud-python/issues/11176)) ([84d9653](https://github.com/googleapis/google-cloud-python/commit/84d9653eb3fb4c22c039934a94e39d19b8c8705c)) + +## Changelog diff --git a/packages/google-cloud-support/CODE_OF_CONDUCT.md b/packages/google-cloud-support/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-support/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/packages/google-cloud-support/CONTRIBUTING.rst b/packages/google-cloud-support/CONTRIBUTING.rst
new file mode 100644
index 000000000000..b0645f292673
--- /dev/null
+++ b/packages/google-cloud-support/CONTRIBUTING.rst
@@ -0,0 +1,281 @@
+.. Generated by synthtool. DO NOT EDIT!
+############
+Contributing
+############
+
+#. **Please sign one of the contributor license agreements below.**
+#. Fork the repo, develop and test your code changes, add docs.
+#. Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+  documentation.
+
+- The feature must work fully on the following CPython versions:
+  3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+   $ nox -s unit
+
+- To run a single unit test::
+
+   $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-<python version> -- -k <name of test>
+
+
+  .. note::
+
+      System tests are only configured to run under recent versions of
+      Python 3. For expediency, we do not run them in older versions
+      of Python 3.
+
+      This alone will not run the tests. You'll need to change some local
+      auth settings and change some configuration in your project to
+      run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-support
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+We also explicitly decided to support Python 3 beginning with version 3.7.
+Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+  works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
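+
+******************************
+Example: ``_call_fut`` Pattern
+******************************
+
+As a minimal, self-contained sketch of the ``_call_fut`` convention described
+under "Coding Style" above (the function under test here is a toy defined
+inline for illustration; it is not part of this package)::
+
+    import unittest
+
+
+    def parse_case_name(name):
+        # Toy function under test, defined inline so the sketch runs standalone.
+        parts = name.split("/")
+        return {"project": parts[1], "case": parts[3]}
+
+
+    class TestParseCaseName(unittest.TestCase):
+        @staticmethod
+        def _call_fut(name):
+            # In real tests, _call_fut imports and invokes the function under
+            # test; here it simply delegates to the local toy function.
+            return parse_case_name(name)
+
+        def test_it(self):
+            self.assertEqual(
+                self._call_fut("projects/p/cases/123"),
+                {"project": "p", "case": "123"},
+            )
+
+
+    if __name__ == "__main__":
+        unittest.main()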
diff --git a/packages/google-cloud-support/LICENSE b/packages/google-cloud-support/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-support/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-support/MANIFEST.in b/packages/google-cloud-support/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-support/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-support/README.rst b/packages/google-cloud-support/README.rst new file mode 100644 index 000000000000..f4fd07ee3b1f --- /dev/null +++ b/packages/google-cloud-support/README.rst @@ -0,0 +1,107 @@ +Python Client for Google Cloud Support API +========================================== + +|preview| |pypi| |versions| + +`Google Cloud Support API`_: Manages Google Cloud technical support cases for Customer Care support offerings. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-support.svg + :target: https://pypi.org/project/google-cloud-support/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-support.svg + :target: https://pypi.org/project/google-cloud-support/ +.. _Google Cloud Support API: https://cloud.google.com/support/docs/reference/support-api +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/support/latest +.. _Product Documentation: https://cloud.google.com/support/docs/reference/support-api + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud Support API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Support API.: https://cloud.google.com/support/docs/reference/support-api +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. 
`virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-support
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-support
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for the Google Cloud Support API
+  to see other available methods on the client.
+- Read the `Google Cloud Support API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Google Cloud Support API Product documentation: https://cloud.google.com/support/docs/reference/support-api
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-support/SECURITY.md b/packages/google-cloud-support/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-support/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake and coordinate disclosure here on GitHub, using a private Security Advisory to discuss and fix the issue.
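To make the README's quick start above concrete, here is a minimal usage sketch built only from the clients and request types this diff adds (it assumes the Cloud Support API is enabled and Application Default Credentials are configured; the project ID is a placeholder):

.. code-block:: python

    from google.cloud import support_v2

    # Synchronous client; credentials are discovered from the environment.
    client = support_v2.CaseServiceClient()

    # List support cases under a parent resource. "projects/my-project" is
    # illustrative; an organizations/{org_id} parent also works.
    request = support_v2.ListCasesRequest(parent="projects/my-project")

    # The returned pager transparently fetches additional pages as you iterate.
    for case in client.list_cases(request=request):
        print(case.display_name)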
diff --git a/packages/google-cloud-support/docs/CHANGELOG.md b/packages/google-cloud-support/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-support/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-support/docs/README.rst b/packages/google-cloud-support/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-support/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-support/docs/_static/custom.css b/packages/google-cloud-support/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-support/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-support/docs/_templates/layout.html b/packages/google-cloud-support/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-support/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
<div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+        {%- block relbar_top %}
+          {%- if theme_show_relbar_top|tobool %}
+            <div class="related top">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+             As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+             Library versions released prior to that date will continue to be available. For more information please
+             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+        {%- block relbar_bottom %}
+          {%- if theme_show_relbar_bottom|tobool %}
+            <div class="related bottom">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-support/docs/conf.py b/packages/google-cloud-support/docs/conf.py new file mode 100644 index 000000000000..b2539fc4e71a --- /dev/null +++ b/packages/google-cloud-support/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-support documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-support" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-support", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-support-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-support.tex", + "google-cloud-support Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. 
+
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (
+        root_doc,
+        "google-cloud-support",
+        "google-cloud-support Documentation",
+        [author],
+        1,
+    )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+    (
+        root_doc,
+        "google-cloud-support",
+        "google-cloud-support Documentation",
+        author,
+        "google-cloud-support",
+        "google-cloud-support Library",
+        "APIs",
+    )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/packages/google-cloud-support/docs/index.rst b/packages/google-cloud-support/docs/index.rst
new file mode 100644
index 000000000000..4549214425c5
--- /dev/null
+++ b/packages/google-cloud-support/docs/index.rst
@@ -0,0 +1,23 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    support_v2/services
+    support_v2/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-support`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-support/docs/multiprocessing.rst b/packages/google-cloud-support/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-support/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
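The multiprocessing note above is worth making concrete: gRPC channels do not survive ``os.fork()``, so each worker should construct its own client after the fork rather than inherit one from the parent process. A minimal sketch of that pattern (the project IDs are placeholders):

.. code-block:: python

    import multiprocessing

    from google.cloud import support_v2

    def count_cases(parent: str) -> int:
        # Build the client inside the worker, i.e. after the fork, so each
        # process owns a fresh gRPC channel.
        client = support_v2.CaseServiceClient()
        return sum(1 for _ in client.list_cases(request={"parent": parent}))

    if __name__ == "__main__":
        parents = ["projects/project-a", "projects/project-b"]
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(count_cases, parents))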
diff --git a/packages/google-cloud-support/docs/support_v2/case_attachment_service.rst b/packages/google-cloud-support/docs/support_v2/case_attachment_service.rst new file mode 100644 index 000000000000..b1ff7d5671df --- /dev/null +++ b/packages/google-cloud-support/docs/support_v2/case_attachment_service.rst @@ -0,0 +1,10 @@ +CaseAttachmentService +--------------------------------------- + +.. automodule:: google.cloud.support_v2.services.case_attachment_service + :members: + :inherited-members: + +.. automodule:: google.cloud.support_v2.services.case_attachment_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-support/docs/support_v2/case_service.rst b/packages/google-cloud-support/docs/support_v2/case_service.rst new file mode 100644 index 000000000000..6eb0348617ba --- /dev/null +++ b/packages/google-cloud-support/docs/support_v2/case_service.rst @@ -0,0 +1,10 @@ +CaseService +----------------------------- + +.. automodule:: google.cloud.support_v2.services.case_service + :members: + :inherited-members: + +.. automodule:: google.cloud.support_v2.services.case_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-support/docs/support_v2/comment_service.rst b/packages/google-cloud-support/docs/support_v2/comment_service.rst new file mode 100644 index 000000000000..5d6fad4d9cf6 --- /dev/null +++ b/packages/google-cloud-support/docs/support_v2/comment_service.rst @@ -0,0 +1,10 @@ +CommentService +-------------------------------- + +.. automodule:: google.cloud.support_v2.services.comment_service + :members: + :inherited-members: + +.. automodule:: google.cloud.support_v2.services.comment_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-support/docs/support_v2/services.rst b/packages/google-cloud-support/docs/support_v2/services.rst new file mode 100644 index 000000000000..0f2958ba81f1 --- /dev/null +++ b/packages/google-cloud-support/docs/support_v2/services.rst @@ -0,0 +1,8 @@ +Services for Google Cloud Support v2 API +======================================== +.. toctree:: + :maxdepth: 2 + + case_attachment_service + case_service + comment_service diff --git a/packages/google-cloud-support/docs/support_v2/types.rst b/packages/google-cloud-support/docs/support_v2/types.rst new file mode 100644 index 000000000000..08626b3565b9 --- /dev/null +++ b/packages/google-cloud-support/docs/support_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Support v2 API +===================================== + +.. automodule:: google.cloud.support_v2.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-support/google/cloud/support/__init__.py b/packages/google-cloud-support/google/cloud/support/__init__.py new file mode 100644 index 000000000000..ddf604b323f0 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support/__init__.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.support import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.support_v2.services.case_attachment_service.async_client import ( + CaseAttachmentServiceAsyncClient, +) +from google.cloud.support_v2.services.case_attachment_service.client import ( + CaseAttachmentServiceClient, +) +from google.cloud.support_v2.services.case_service.async_client import ( + CaseServiceAsyncClient, +) +from google.cloud.support_v2.services.case_service.client import CaseServiceClient +from google.cloud.support_v2.services.comment_service.async_client import ( + CommentServiceAsyncClient, +) +from google.cloud.support_v2.services.comment_service.client import CommentServiceClient +from google.cloud.support_v2.types.actor import Actor +from google.cloud.support_v2.types.attachment import Attachment +from google.cloud.support_v2.types.attachment_service import ( + ListAttachmentsRequest, + ListAttachmentsResponse, +) +from google.cloud.support_v2.types.case import Case, CaseClassification +from google.cloud.support_v2.types.case_service import ( + CloseCaseRequest, + CreateCaseRequest, + EscalateCaseRequest, + GetCaseRequest, + ListCasesRequest, + ListCasesResponse, + SearchCaseClassificationsRequest, + SearchCaseClassificationsResponse, + SearchCasesRequest, + SearchCasesResponse, + UpdateCaseRequest, +) +from google.cloud.support_v2.types.comment import Comment +from google.cloud.support_v2.types.comment_service import ( + CreateCommentRequest, + ListCommentsRequest, + ListCommentsResponse, +) +from google.cloud.support_v2.types.escalation import Escalation + +__all__ = ( + "CaseAttachmentServiceClient", + "CaseAttachmentServiceAsyncClient", + "CaseServiceClient", + "CaseServiceAsyncClient", + "CommentServiceClient", + "CommentServiceAsyncClient", + "Actor", + "Attachment", + "ListAttachmentsRequest", + "ListAttachmentsResponse", + "Case", + "CaseClassification", + "CloseCaseRequest", + "CreateCaseRequest", + "EscalateCaseRequest", + "GetCaseRequest", + "ListCasesRequest", + "ListCasesResponse", + "SearchCaseClassificationsRequest", + "SearchCaseClassificationsResponse", + "SearchCasesRequest", + "SearchCasesResponse", + "UpdateCaseRequest", + "Comment", + "CreateCommentRequest", + "ListCommentsRequest", + "ListCommentsResponse", + "Escalation", +) diff --git a/packages/google-cloud-support/google/cloud/support/gapic_version.py b/packages/google-cloud-support/google/cloud/support/gapic_version.py new file mode 100644 index 000000000000..aa80f74315ce --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-support/google/cloud/support/py.typed b/packages/google-cloud-support/google/cloud/support/py.typed new file mode 100644 index 000000000000..e4aea2b09399 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-support package uses inline types. diff --git a/packages/google-cloud-support/google/cloud/support_v2/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/__init__.py new file mode 100644 index 000000000000..cfc6d299a9c1 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/__init__.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.support_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.case_attachment_service import ( + CaseAttachmentServiceAsyncClient, + CaseAttachmentServiceClient, +) +from .services.case_service import CaseServiceAsyncClient, CaseServiceClient +from .services.comment_service import CommentServiceAsyncClient, CommentServiceClient +from .types.actor import Actor +from .types.attachment import Attachment +from .types.attachment_service import ListAttachmentsRequest, ListAttachmentsResponse +from .types.case import Case, CaseClassification +from .types.case_service import ( + CloseCaseRequest, + CreateCaseRequest, + EscalateCaseRequest, + GetCaseRequest, + ListCasesRequest, + ListCasesResponse, + SearchCaseClassificationsRequest, + SearchCaseClassificationsResponse, + SearchCasesRequest, + SearchCasesResponse, + UpdateCaseRequest, +) +from .types.comment import Comment +from .types.comment_service import ( + CreateCommentRequest, + ListCommentsRequest, + ListCommentsResponse, +) +from .types.escalation import Escalation + +__all__ = ( + "CaseAttachmentServiceAsyncClient", + "CaseServiceAsyncClient", + "CommentServiceAsyncClient", + "Actor", + "Attachment", + "Case", + "CaseAttachmentServiceClient", + "CaseClassification", + "CaseServiceClient", + "CloseCaseRequest", + "Comment", + "CommentServiceClient", + "CreateCaseRequest", + "CreateCommentRequest", + "EscalateCaseRequest", + "Escalation", + "GetCaseRequest", + "ListAttachmentsRequest", + "ListAttachmentsResponse", + "ListCasesRequest", + "ListCasesResponse", + "ListCommentsRequest", + "ListCommentsResponse", + "SearchCaseClassificationsRequest", + "SearchCaseClassificationsResponse", + "SearchCasesRequest", + "SearchCasesResponse", + "UpdateCaseRequest", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/gapic_metadata.json b/packages/google-cloud-support/google/cloud/support_v2/gapic_metadata.json new file mode 100644 index 000000000000..bdb9792547f2 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/gapic_metadata.json @@ -0,0 +1,231 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding 
library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.support_v2", + "protoPackage": "google.cloud.support.v2", + "schema": "1.0", + "services": { + "CaseAttachmentService": { + "clients": { + "grpc": { + "libraryClient": "CaseAttachmentServiceClient", + "rpcs": { + "ListAttachments": { + "methods": [ + "list_attachments" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CaseAttachmentServiceAsyncClient", + "rpcs": { + "ListAttachments": { + "methods": [ + "list_attachments" + ] + } + } + }, + "rest": { + "libraryClient": "CaseAttachmentServiceClient", + "rpcs": { + "ListAttachments": { + "methods": [ + "list_attachments" + ] + } + } + } + } + }, + "CaseService": { + "clients": { + "grpc": { + "libraryClient": "CaseServiceClient", + "rpcs": { + "CloseCase": { + "methods": [ + "close_case" + ] + }, + "CreateCase": { + "methods": [ + "create_case" + ] + }, + "EscalateCase": { + "methods": [ + "escalate_case" + ] + }, + "GetCase": { + "methods": [ + "get_case" + ] + }, + "ListCases": { + "methods": [ + "list_cases" + ] + }, + "SearchCaseClassifications": { + "methods": [ + "search_case_classifications" + ] + }, + "SearchCases": { + "methods": [ + "search_cases" + ] + }, + "UpdateCase": { + "methods": [ + "update_case" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CaseServiceAsyncClient", + "rpcs": { + "CloseCase": { + "methods": [ + "close_case" + ] + }, + "CreateCase": { + "methods": [ + "create_case" + ] + }, + "EscalateCase": { + "methods": [ + "escalate_case" + ] + }, + "GetCase": { + "methods": [ + "get_case" + ] + }, + "ListCases": { + "methods": [ + "list_cases" + ] + }, + "SearchCaseClassifications": { + "methods": [ + "search_case_classifications" + ] + }, + "SearchCases": { + "methods": [ + "search_cases" + ] + }, + "UpdateCase": { + "methods": [ + "update_case" + ] + } + } + }, + "rest": { + "libraryClient": "CaseServiceClient", + "rpcs": { + "CloseCase": { + "methods": [ + "close_case" + ] + }, + "CreateCase": { + "methods": [ + "create_case" + ] + }, + "EscalateCase": { + "methods": [ + "escalate_case" + ] + }, + "GetCase": { + "methods": [ + "get_case" + ] + }, + "ListCases": { + "methods": [ + "list_cases" + ] + }, + "SearchCaseClassifications": { + "methods": [ + "search_case_classifications" + ] + }, + "SearchCases": { + "methods": [ + "search_cases" + ] + }, + "UpdateCase": { + "methods": [ + "update_case" + ] + } + } + } + } + }, + "CommentService": { + "clients": { + "grpc": { + "libraryClient": "CommentServiceClient", + "rpcs": { + "CreateComment": { + "methods": [ + "create_comment" + ] + }, + "ListComments": { + "methods": [ + "list_comments" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CommentServiceAsyncClient", + "rpcs": { + "CreateComment": { + "methods": [ + "create_comment" + ] + }, + "ListComments": { + "methods": [ + "list_comments" + ] + } + } + }, + "rest": { + "libraryClient": "CommentServiceClient", + "rpcs": { + "CreateComment": { + "methods": [ + "create_comment" + ] + }, + "ListComments": { + "methods": [ + "list_comments" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-support/google/cloud/support_v2/gapic_version.py b/packages/google-cloud-support/google/cloud/support_v2/gapic_version.py new file mode 100644 index 000000000000..aa80f74315ce --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-support/google/cloud/support_v2/py.typed b/packages/google-cloud-support/google/cloud/support_v2/py.typed new file mode 100644 index 000000000000..e4aea2b09399 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-support package uses inline types. diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/__init__.py new file mode 100644 index 000000000000..b64f890647fe --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import CaseAttachmentServiceAsyncClient +from .client import CaseAttachmentServiceClient + +__all__ = ( + "CaseAttachmentServiceClient", + "CaseAttachmentServiceAsyncClient", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/async_client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/async_client.py new file mode 100644 index 000000000000..4ed02663d93e --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/async_client.py @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.support_v2.services.case_attachment_service import pagers +from google.cloud.support_v2.types import attachment, attachment_service + +from .client import CaseAttachmentServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, CaseAttachmentServiceTransport +from .transports.grpc_asyncio import CaseAttachmentServiceGrpcAsyncIOTransport + + +class CaseAttachmentServiceAsyncClient: + """A service to manage file attachment for Google Cloud support + cases. 
+ """ + + _client: CaseAttachmentServiceClient + + DEFAULT_ENDPOINT = CaseAttachmentServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CaseAttachmentServiceClient.DEFAULT_MTLS_ENDPOINT + + attachment_path = staticmethod(CaseAttachmentServiceClient.attachment_path) + parse_attachment_path = staticmethod( + CaseAttachmentServiceClient.parse_attachment_path + ) + case_path = staticmethod(CaseAttachmentServiceClient.case_path) + parse_case_path = staticmethod(CaseAttachmentServiceClient.parse_case_path) + common_billing_account_path = staticmethod( + CaseAttachmentServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CaseAttachmentServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CaseAttachmentServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + CaseAttachmentServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + CaseAttachmentServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CaseAttachmentServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(CaseAttachmentServiceClient.common_project_path) + parse_common_project_path = staticmethod( + CaseAttachmentServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + CaseAttachmentServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + CaseAttachmentServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CaseAttachmentServiceAsyncClient: The constructed client. + """ + return CaseAttachmentServiceClient.from_service_account_info.__func__(CaseAttachmentServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CaseAttachmentServiceAsyncClient: The constructed client. + """ + return CaseAttachmentServiceClient.from_service_account_file.__func__(CaseAttachmentServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return CaseAttachmentServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> CaseAttachmentServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CaseAttachmentServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(CaseAttachmentServiceClient).get_transport_class,
+        type(CaseAttachmentServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, CaseAttachmentServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the case attachment service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.CaseAttachmentServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+ """ + self._client = CaseAttachmentServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_attachments( + self, + request: Optional[ + Union[attachment_service.ListAttachmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAttachmentsAsyncPager: + r"""Retrieve all attachments associated with a support + case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_list_attachments(): + # Create a client + client = support_v2.CaseAttachmentServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.ListAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_attachments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.ListAttachmentsRequest, dict]]): + The request object. The request message for the + ListAttachments endpoint. + parent (:class:`str`): + Required. The resource name of Case + object for which attachments should be + listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_attachment_service.pagers.ListAttachmentsAsyncPager: + The response message for the + ListAttachments endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = attachment_service.ListAttachmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_attachments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAttachmentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CaseAttachmentServiceAsyncClient",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/client.py new file mode 100644 index 000000000000..d6b30d837486 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/client.py @@ -0,0 +1,603 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.support_v2.services.case_attachment_service import pagers +from google.cloud.support_v2.types import attachment, attachment_service + +from .transports.base import DEFAULT_CLIENT_INFO, CaseAttachmentServiceTransport +from .transports.grpc import CaseAttachmentServiceGrpcTransport +from .transports.grpc_asyncio import CaseAttachmentServiceGrpcAsyncIOTransport +from .transports.rest import CaseAttachmentServiceRestTransport + + +class CaseAttachmentServiceClientMeta(type): + """Metaclass for the CaseAttachmentService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[CaseAttachmentServiceTransport]]
+    _transport_registry["grpc"] = CaseAttachmentServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = CaseAttachmentServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = CaseAttachmentServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[CaseAttachmentServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class CaseAttachmentServiceClient(metaclass=CaseAttachmentServiceClientMeta):
+    """A service to manage file attachment for Google Cloud support
+    cases.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "cloudsupport.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CaseAttachmentServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CaseAttachmentServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> CaseAttachmentServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CaseAttachmentServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def attachment_path(
+        organization: str,
+        case: str,
+        attachment_id: str,
+    ) -> str:
+        """Returns a fully-qualified attachment string."""
+        return "organizations/{organization}/cases/{case}/attachments/{attachment_id}".format(
+            organization=organization,
+            case=case,
+            attachment_id=attachment_id,
+        )
+
+    @staticmethod
+    def parse_attachment_path(path: str) -> Dict[str, str]:
+        """Parses an attachment path into its component segments."""
+        m = re.match(
+            r"^organizations/(?P<organization>.+?)/cases/(?P<case>.+?)/attachments/(?P<attachment_id>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def case_path(
+        organization: str,
+        case: str,
+    ) -> str:
+        """Returns a fully-qualified case string."""
+        return "organizations/{organization}/cases/{case}".format(
+            organization=organization,
+            case=case,
+        )
+
+    @staticmethod
+    def parse_case_path(path: str) -> Dict[str, str]:
+        """Parses a case path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)/cases/(?P<case>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
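+
+        This is called from ``__init__`` to decide which endpoint and client
+        certificate to use before the transport is created.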
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CaseAttachmentServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the case attachment service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CaseAttachmentServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CaseAttachmentServiceTransport): + # transport is a CaseAttachmentServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_attachments( + self, + request: Optional[ + Union[attachment_service.ListAttachmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAttachmentsPager: + r"""Retrieve all attachments associated with a support + case. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_list_attachments(): + # Create a client + client = support_v2.CaseAttachmentServiceClient() + + # Initialize request argument(s) + request = support_v2.ListAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_attachments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.support_v2.types.ListAttachmentsRequest, dict]): + The request object. The request message for the + ListAttachments endpoint. + parent (str): + Required. The resource name of Case + object for which attachments should be + listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_attachment_service.pagers.ListAttachmentsPager: + The response message for the + ListAttachments endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a attachment_service.ListAttachmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, attachment_service.ListAttachmentsRequest): + request = attachment_service.ListAttachmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_attachments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAttachmentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "CaseAttachmentServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CaseAttachmentServiceClient",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/pagers.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/pagers.py new file mode 100644 index 000000000000..afa5c6f2e8f2 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.support_v2.types import attachment, attachment_service + + +class ListAttachmentsPager: + """A pager for iterating through ``list_attachments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.ListAttachmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``attachments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAttachments`` requests and continue to iterate + through the ``attachments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.ListAttachmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., attachment_service.ListAttachmentsResponse], + request: attachment_service.ListAttachmentsRequest, + response: attachment_service.ListAttachmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.ListAttachmentsRequest): + The initial request object. + response (google.cloud.support_v2.types.ListAttachmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = attachment_service.ListAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[attachment_service.ListAttachmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[attachment.Attachment]: + for page in self.pages: + yield from page.attachments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAttachmentsAsyncPager: + """A pager for iterating through ``list_attachments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.ListAttachmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``attachments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAttachments`` requests and continue to iterate + through the ``attachments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.ListAttachmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[attachment_service.ListAttachmentsResponse]], + request: attachment_service.ListAttachmentsRequest, + response: attachment_service.ListAttachmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.ListAttachmentsRequest): + The initial request object. + response (google.cloud.support_v2.types.ListAttachmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = attachment_service.ListAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[attachment_service.ListAttachmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[attachment.Attachment]: + async def async_generator(): + async for page in self.pages: + for response in page.attachments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/__init__.py new file mode 100644 index 000000000000..86ce7ac4d14b --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CaseAttachmentServiceTransport +from .grpc import CaseAttachmentServiceGrpcTransport +from .grpc_asyncio import CaseAttachmentServiceGrpcAsyncIOTransport +from .rest import ( + CaseAttachmentServiceRestInterceptor, + CaseAttachmentServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[CaseAttachmentServiceTransport]] +_transport_registry["grpc"] = CaseAttachmentServiceGrpcTransport +_transport_registry["grpc_asyncio"] = CaseAttachmentServiceGrpcAsyncIOTransport +_transport_registry["rest"] = CaseAttachmentServiceRestTransport + +__all__ = ( + "CaseAttachmentServiceTransport", + "CaseAttachmentServiceGrpcTransport", + "CaseAttachmentServiceGrpcAsyncIOTransport", + "CaseAttachmentServiceRestTransport", + "CaseAttachmentServiceRestInterceptor", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/base.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/base.py new file mode 100644 index 000000000000..1480a09dc2fb --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/base.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version +from google.cloud.support_v2.types import attachment_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CaseAttachmentServiceTransport(abc.ABC): + """Abstract transport class for CaseAttachmentService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudsupport.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
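+        # Resolution order: explicit ``credentials`` take precedence, then a
+        # ``credentials_file`` loaded from disk, then application default
+        # credentials discovered via google.auth.default().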
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_attachments: gapic_v1.method.wrap_method( + self.list_attachments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_attachments( + self, + ) -> Callable[ + [attachment_service.ListAttachmentsRequest], + Union[ + attachment_service.ListAttachmentsResponse, + Awaitable[attachment_service.ListAttachmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CaseAttachmentServiceTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/grpc.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/grpc.py new file mode 100644 index 000000000000..9bb9098d96d6 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/grpc.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.support_v2.types import attachment_service + +from .base import DEFAULT_CLIENT_INFO, CaseAttachmentServiceTransport + + +class CaseAttachmentServiceGrpcTransport(CaseAttachmentServiceTransport): + """gRPC backend transport for CaseAttachmentService. + + A service to manage file attachment for Google Cloud support + cases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_attachments( + self, + ) -> Callable[ + [attachment_service.ListAttachmentsRequest], + attachment_service.ListAttachmentsResponse, + ]: + r"""Return a callable for the list attachments method over gRPC. + + Retrieve all attachments associated with a support + case. + + Returns: + Callable[[~.ListAttachmentsRequest], + ~.ListAttachmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_attachments" not in self._stubs: + self._stubs["list_attachments"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseAttachmentService/ListAttachments", + request_serializer=attachment_service.ListAttachmentsRequest.serialize, + response_deserializer=attachment_service.ListAttachmentsResponse.deserialize, + ) + return self._stubs["list_attachments"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CaseAttachmentServiceGrpcTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/grpc_asyncio.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..fa07fea39768 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/grpc_asyncio.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.support_v2.types import attachment_service + +from .base import DEFAULT_CLIENT_INFO, CaseAttachmentServiceTransport +from .grpc import CaseAttachmentServiceGrpcTransport + + +class CaseAttachmentServiceGrpcAsyncIOTransport(CaseAttachmentServiceTransport): + """gRPC AsyncIO backend transport for CaseAttachmentService. + + A service to manage file attachment for Google Cloud support + cases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
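+                # (This branch only serves the deprecated ``api_mtls_endpoint``
+                # and ``client_cert_source`` arguments and is kept for
+                # backwards compatibility.)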
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_attachments( + self, + ) -> Callable[ + [attachment_service.ListAttachmentsRequest], + Awaitable[attachment_service.ListAttachmentsResponse], + ]: + r"""Return a callable for the list attachments method over gRPC. + + Retrieve all attachments associated with a support + case. + + Returns: + Callable[[~.ListAttachmentsRequest], + Awaitable[~.ListAttachmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_attachments" not in self._stubs: + self._stubs["list_attachments"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseAttachmentService/ListAttachments", + request_serializer=attachment_service.ListAttachmentsRequest.serialize, + response_deserializer=attachment_service.ListAttachmentsResponse.deserialize, + ) + return self._stubs["list_attachments"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("CaseAttachmentServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/rest.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/rest.py new file mode 100644 index 000000000000..07fee965ba8a --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_attachment_service/transports/rest.py @@ -0,0 +1,315 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.support_v2.types import attachment_service + +from .base import CaseAttachmentServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CaseAttachmentServiceRestInterceptor: + """Interceptor for CaseAttachmentService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CaseAttachmentServiceRestTransport. + + .. code-block:: python + class MyCustomCaseAttachmentServiceInterceptor(CaseAttachmentServiceRestInterceptor): + def pre_list_attachments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_attachments(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CaseAttachmentServiceRestTransport(interceptor=MyCustomCaseAttachmentServiceInterceptor()) + client = CaseAttachmentServiceClient(transport=transport) + + + """ + + def pre_list_attachments( + self, + request: attachment_service.ListAttachmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[attachment_service.ListAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_attachments + + Override in a subclass to manipulate the request or metadata + before they are sent to the CaseAttachmentService server. + """ + return request, metadata + + def post_list_attachments( + self, response: attachment_service.ListAttachmentsResponse + ) -> attachment_service.ListAttachmentsResponse: + """Post-rpc interceptor for list_attachments + + Override in a subclass to manipulate the response + after it is returned by the CaseAttachmentService server but before + it is returned to user code. 
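+
+        The object returned here is what the caller of ``list_attachments``
+        ultimately receives.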
+ """ + return response + + +@dataclasses.dataclass +class CaseAttachmentServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CaseAttachmentServiceRestInterceptor + + +class CaseAttachmentServiceRestTransport(CaseAttachmentServiceTransport): + """REST backend transport for CaseAttachmentService. + + A service to manage file attachment for Google Cloud support + cases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CaseAttachmentServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or CaseAttachmentServiceRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _ListAttachments(CaseAttachmentServiceRestStub):
+ def __hash__(self):
+ return hash("ListAttachments")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: attachment_service.ListAttachmentsRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> attachment_service.ListAttachmentsResponse:
+ r"""Call the list attachments method over HTTP.
+
+ Args:
+ request (~.attachment_service.ListAttachmentsRequest):
+ The request object. The request message for the
+ ListAttachments endpoint.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.attachment_service.ListAttachmentsResponse:
+ The response message for the
+ ListAttachments endpoint.
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v2/{parent=projects/*/cases/*}/attachments",
+ },
+ {
+ "method": "get",
+ "uri": "/v2/{parent=organizations/*/cases/*}/attachments",
+ },
+ ]
+ request, metadata = self._interceptor.pre_list_attachments(
+ request, metadata
+ )
+ pb_request = attachment_service.ListAttachmentsRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
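+ # For example, a 404 from the API surfaces as
+ # google.api_core.exceptions.NotFound rather than a bare HTTP error.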
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = attachment_service.ListAttachmentsResponse() + pb_resp = attachment_service.ListAttachmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_attachments(resp) + return resp + + @property + def list_attachments( + self, + ) -> Callable[ + [attachment_service.ListAttachmentsRequest], + attachment_service.ListAttachmentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAttachments(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CaseAttachmentServiceRestTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/__init__.py new file mode 100644 index 000000000000..0cb710ca2f48 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CaseServiceAsyncClient +from .client import CaseServiceClient + +__all__ = ( + "CaseServiceClient", + "CaseServiceAsyncClient", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/async_client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/async_client.py new file mode 100644 index 000000000000..ad97352f2217 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/async_client.py @@ -0,0 +1,1064 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.support_v2.services.case_service import pagers +from google.cloud.support_v2.types import actor +from google.cloud.support_v2.types import case +from google.cloud.support_v2.types import case as gcs_case +from google.cloud.support_v2.types import case_service + +from .client import CaseServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, CaseServiceTransport +from .transports.grpc_asyncio import CaseServiceGrpcAsyncIOTransport + + +class CaseServiceAsyncClient: + """A service to manage Google Cloud support cases.""" + + _client: CaseServiceClient + + DEFAULT_ENDPOINT = CaseServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CaseServiceClient.DEFAULT_MTLS_ENDPOINT + + case_path = staticmethod(CaseServiceClient.case_path) + parse_case_path = staticmethod(CaseServiceClient.parse_case_path) + common_billing_account_path = staticmethod( + CaseServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CaseServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CaseServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(CaseServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(CaseServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + CaseServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(CaseServiceClient.common_project_path) + parse_common_project_path = staticmethod( + CaseServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(CaseServiceClient.common_location_path) + parse_common_location_path = staticmethod( + CaseServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CaseServiceAsyncClient: The constructed client. + """ + return CaseServiceClient.from_service_account_info.__func__(CaseServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
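+
+ For example (the filename below is illustrative only)::
+
+ client = CaseServiceAsyncClient.from_service_account_file(
+ "service-account.json"
+ )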
+
+ Returns:
+ CaseServiceAsyncClient: The constructed client.
+ """
+ return CaseServiceClient.from_service_account_file.__func__(CaseServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ return CaseServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+ @property
+ def transport(self) -> CaseServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ CaseServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(CaseServiceClient).get_transport_class, type(CaseServiceClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, CaseServiceTransport] = "grpc_asyncio",
+ client_options: Optional[ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the case service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.CaseServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client = CaseServiceClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def get_case(
+ self,
+ request: Optional[Union[case_service.GetCaseRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> case.Case:
+ r"""Retrieve the specified case.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import support_v2
+
+ async def sample_get_case():
+ # Create a client
+ client = support_v2.CaseServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = support_v2.GetCaseRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = await client.get_case(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.support_v2.types.GetCaseRequest, dict]]):
+ The request object. The request message for the GetCase
+ endpoint.
+ name (:class:`str`):
+ Required. The fully qualified name of
+ a case to be retrieved.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.support_v2.types.Case:
+ A support case.
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = case_service.GetCaseRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
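+ # The defaults below retry only ServiceUnavailable errors, starting
+ # at a 1.0s delay and multiplying by 1.3 up to a 10.0s cap per
+ # attempt, within an overall 60s deadline.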
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_case, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_cases( + self, + request: Optional[Union[case_service.ListCasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCasesAsyncPager: + r"""Retrieve all cases under the specified parent. + + Note: Listing cases under an Organization returns only the cases + directly parented by that organization. To retrieve all cases + under an organization, including cases parented by projects + under that organization, use ``cases.search``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_list_cases(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.ListCasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.ListCasesRequest, dict]]): + The request object. The request message for the ListCases + endpoint. + parent (:class:`str`): + Required. The fully qualified name of + parent resource to list cases under. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_service.pagers.ListCasesAsyncPager: + The response message for the + ListCases endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = case_service.ListCasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_cases, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCasesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_cases( + self, + request: Optional[Union[case_service.SearchCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchCasesAsyncPager: + r"""Search cases using the specified query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_search_cases(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.SearchCasesRequest( + ) + + # Make the request + page_result = client.search_cases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.SearchCasesRequest, dict]]): + The request object. The request message for the + SearchCases endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_service.pagers.SearchCasesAsyncPager: + The response message for the + SearchCases endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = case_service.SearchCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_cases, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchCasesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_case( + self, + request: Optional[Union[case_service.CreateCaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + case: Optional[gcs_case.Case] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_case.Case: + r"""Create a new case and associate it with the given Google Cloud + Resource. The case object must have the following fields set: + ``display_name``, ``description``, ``classification``, and + ``priority``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_create_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.CreateCaseRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_case(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.CreateCaseRequest, dict]]): + The request object. The request message for the + CreateCase endpoint. + parent (:class:`str`): + Required. The name of the Google + Cloud Resource under which the case + should be created. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + case (:class:`google.cloud.support_v2.types.Case`): + Required. The case to be created. + This corresponds to the ``case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Case: + A support case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
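+ # For example, create_case(request=..., parent="...") is rejected:
+ # callers pass either a complete request object or the flattened
+ # fields, never both.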
+ has_flattened_params = any([parent, case]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = case_service.CreateCaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if case is not None: + request.case = case + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_case, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_case( + self, + request: Optional[Union[case_service.UpdateCaseRequest, dict]] = None, + *, + case: Optional[gcs_case.Case] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_case.Case: + r"""Update the specified case. Only a subset of fields + can be updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_update_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.UpdateCaseRequest( + ) + + # Make the request + response = await client.update_case(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.UpdateCaseRequest, dict]]): + The request object. The request message for the + UpdateCase endpoint + case (:class:`google.cloud.support_v2.types.Case`): + Required. The case object to update. + This corresponds to the ``case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A list of attributes of the case object that should be + updated as part of this request. Supported values are + ``priority``, ``display_name``, and + ``subscriber_email_addresses``. If no fields are + specified, all supported fields are updated. + + WARNING: If you do not provide a field mask, then you + might accidentally clear some fields. For example, if + you leave the field mask empty and do not provide a + value for ``subscriber_email_addresses``, then + ``subscriber_email_addresses`` is updated to empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Case: + A support case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([case, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = case_service.UpdateCaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if case is not None: + request.case = case + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_case, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("case.name", request.case.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def escalate_case( + self, + request: Optional[Union[case_service.EscalateCaseRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case.Case: + r"""Escalate a case. Escalating a case will initiate the + Google Cloud Support escalation management process. + This operation is only available to certain Customer + Care tiers. Go to https://cloud.google.com/support and + look for 'Technical support escalations' in the feature + list to find out which tiers are able to perform + escalations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_escalate_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.EscalateCaseRequest( + name="name_value", + ) + + # Make the request + response = await client.escalate_case(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.EscalateCaseRequest, dict]]): + The request object. The request message for the + EscalateCase endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Case: + A support case. + """ + # Create or coerce a protobuf request object. 
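+ # A plain dict is also accepted here and coerced below, e.g.
+ # (illustrative name only) {"name": "organizations/123/cases/456"}.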
+ request = case_service.EscalateCaseRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.escalate_case,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def close_case(
+ self,
+ request: Optional[Union[case_service.CloseCaseRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> case.Case:
+ r"""Close the specified case.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import support_v2
+
+ async def sample_close_case():
+ # Create a client
+ client = support_v2.CaseServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = support_v2.CloseCaseRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = await client.close_case(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.support_v2.types.CloseCaseRequest, dict]]):
+ The request object. The request message for the CloseCase
+ endpoint.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.support_v2.types.Case:
+ A support case.
+ """
+ # Create or coerce a protobuf request object.
+ request = case_service.CloseCaseRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.close_case,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def search_case_classifications(
+ self,
+ request: Optional[
+ Union[case_service.SearchCaseClassificationsRequest, dict]
+ ] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.SearchCaseClassificationsAsyncPager:
+ r"""Retrieve valid classifications to be used when
+ creating a support case. The classifications are
+ hierarchical, with each classification containing all
+ levels of the hierarchy, separated by " > ".
For example + "Technical Issue > Compute > Compute Engine". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_search_case_classifications(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.SearchCaseClassificationsRequest( + ) + + # Make the request + page_result = client.search_case_classifications(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.SearchCaseClassificationsRequest, dict]]): + The request object. The request message for + SearchCaseClassifications endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_service.pagers.SearchCaseClassificationsAsyncPager: + The response message for + SearchCaseClassifications endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = case_service.SearchCaseClassificationsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_case_classifications, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchCaseClassificationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CaseServiceAsyncClient",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/client.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/client.py new file mode 100644 index 000000000000..1f9366383eb1 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/client.py @@ -0,0 +1,1267 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.support_v2.services.case_service import pagers +from google.cloud.support_v2.types import actor +from google.cloud.support_v2.types import case +from google.cloud.support_v2.types import case as gcs_case +from google.cloud.support_v2.types import case_service + +from .transports.base import DEFAULT_CLIENT_INFO, CaseServiceTransport +from .transports.grpc import CaseServiceGrpcTransport +from .transports.grpc_asyncio import CaseServiceGrpcAsyncIOTransport +from .transports.rest import CaseServiceRestTransport + + +class CaseServiceClientMeta(type): + """Metaclass for the CaseService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CaseServiceTransport]] + _transport_registry["grpc"] = CaseServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CaseServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CaseServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CaseServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CaseServiceClient(metaclass=CaseServiceClientMeta): + """A service to manage Google Cloud support cases.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
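+
+ For example, "cloudsupport.googleapis.com" becomes
+ "cloudsupport.mtls.googleapis.com".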
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "cloudsupport.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ CaseServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ CaseServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> CaseServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ CaseServiceTransport: The transport used by the client
+ instance.
+ """ + return self._transport + + @staticmethod + def case_path( + organization: str, + case: str, + ) -> str: + """Returns a fully-qualified case string.""" + return "organizations/{organization}/cases/{case}".format( + organization=organization, + case=case, + ) + + @staticmethod + def parse_case_path(path: str) -> Dict[str, str]: + """Parses a case path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/cases/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CaseServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the case service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CaseServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CaseServiceTransport): + # transport is a CaseServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_case( + self, + request: Optional[Union[case_service.GetCaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case.Case: + r"""Retrieve the specified case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_get_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.GetCaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_case(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.support_v2.types.GetCaseRequest, dict]): + The request object. The request message for the GetCase + endpoint. + name (str): + Required. The fully qualified name of + a case to be retrieved. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Case: + A support case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a case_service.GetCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, case_service.GetCaseRequest): + request = case_service.GetCaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_cases( + self, + request: Optional[Union[case_service.ListCasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCasesPager: + r"""Retrieve all cases under the specified parent. + + Note: Listing cases under an Organization returns only the cases + directly parented by that organization. To retrieve all cases + under an organization, including cases parented by projects + under that organization, use ``cases.search``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_list_cases(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.ListCasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.support_v2.types.ListCasesRequest, dict]): + The request object. The request message for the ListCases + endpoint. + parent (str): + Required. The fully qualified name of + parent resource to list cases under. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_service.pagers.ListCasesPager: + The response message for the + ListCases endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a case_service.ListCasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, case_service.ListCasesRequest): + request = case_service.ListCasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_cases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCasesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_cases( + self, + request: Optional[Union[case_service.SearchCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchCasesPager: + r"""Search cases using the specified query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_search_cases(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.SearchCasesRequest( + ) + + # Make the request + page_result = client.search_cases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.support_v2.types.SearchCasesRequest, dict]): + The request object. The request message for the + SearchCases endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_service.pagers.SearchCasesPager: + The response message for the + SearchCases endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a case_service.SearchCasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, case_service.SearchCasesRequest): + request = case_service.SearchCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_cases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchCasesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_case( + self, + request: Optional[Union[case_service.CreateCaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + case: Optional[gcs_case.Case] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_case.Case: + r"""Create a new case and associate it with the given Google Cloud + Resource. The case object must have the following fields set: + ``display_name``, ``description``, ``classification``, and + ``priority``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_create_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.CreateCaseRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_case(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.support_v2.types.CreateCaseRequest, dict]): + The request object. The request message for the + CreateCase endpoint. + parent (str): + Required. The name of the Google + Cloud Resource under which the case + should be created. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + case (google.cloud.support_v2.types.Case): + Required. The case to be created. + This corresponds to the ``case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Case: + A support case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, case]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a case_service.CreateCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, case_service.CreateCaseRequest): + request = case_service.CreateCaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if case is not None: + request.case = case + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_case( + self, + request: Optional[Union[case_service.UpdateCaseRequest, dict]] = None, + *, + case: Optional[gcs_case.Case] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_case.Case: + r"""Update the specified case. Only a subset of fields + can be updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_update_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.UpdateCaseRequest( + ) + + # Make the request + response = client.update_case(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.support_v2.types.UpdateCaseRequest, dict]): + The request object. The request message for the + UpdateCase endpoint + case (google.cloud.support_v2.types.Case): + Required. The case object to update. + This corresponds to the ``case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A list of attributes of the case object that should be + updated as part of this request. Supported values are + ``priority``, ``display_name``, and + ``subscriber_email_addresses``. If no fields are + specified, all supported fields are updated. + + WARNING: If you do not provide a field mask, then you + might accidentally clear some fields. For example, if + you leave the field mask empty and do not provide a + value for ``subscriber_email_addresses``, then + ``subscriber_email_addresses`` is updated to empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Case: + A support case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([case, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a case_service.UpdateCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, case_service.UpdateCaseRequest): + request = case_service.UpdateCaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if case is not None: + request.case = case + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("case.name", request.case.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def escalate_case( + self, + request: Optional[Union[case_service.EscalateCaseRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case.Case: + r"""Escalate a case. Escalating a case will initiate the + Google Cloud Support escalation management process. + This operation is only available to certain Customer + Care tiers. Go to https://cloud.google.com/support and + look for 'Technical support escalations' in the feature + list to find out which tiers are able to perform + escalations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_escalate_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.EscalateCaseRequest( + name="name_value", + ) + + # Make the request + response = client.escalate_case(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.support_v2.types.EscalateCaseRequest, dict]): + The request object. The request message for the + EscalateCase endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Case: + A support case. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a case_service.EscalateCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, case_service.EscalateCaseRequest): + request = case_service.EscalateCaseRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.escalate_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def close_case( + self, + request: Optional[Union[case_service.CloseCaseRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case.Case: + r"""Close the specified case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import support_v2
+
+            def sample_close_case():
+                # Create a client
+                client = support_v2.CaseServiceClient()
+
+                # Initialize request argument(s)
+                request = support_v2.CloseCaseRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.close_case(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.support_v2.types.CloseCaseRequest, dict]):
+                The request object. The request message for the CloseCase
+                endpoint.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.support_v2.types.Case:
+                A support case.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a case_service.CloseCaseRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, case_service.CloseCaseRequest):
+            request = case_service.CloseCaseRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.close_case]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def search_case_classifications(
+        self,
+        request: Optional[
+            Union[case_service.SearchCaseClassificationsRequest, dict]
+        ] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.SearchCaseClassificationsPager:
+        r"""Retrieve valid classifications to be used when
+        creating a support case. The classifications are
+        hierarchical, with each classification containing all
+        levels of the hierarchy, separated by " > ". For example,
+        "Technical Issue > Compute > Compute Engine".
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import support_v2
+
+            def sample_search_case_classifications():
+                # Create a client
+                client = support_v2.CaseServiceClient()
+
+                # Initialize request argument(s)
+                request = support_v2.SearchCaseClassificationsRequest(
+                )
+
+                # Make the request
+                page_result = client.search_case_classifications(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.support_v2.types.SearchCaseClassificationsRequest, dict]):
+                The request object. The request message for the
+                SearchCaseClassifications endpoint.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.case_service.pagers.SearchCaseClassificationsPager: + The response message for + SearchCaseClassifications endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a case_service.SearchCaseClassificationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, case_service.SearchCaseClassificationsRequest): + request = case_service.SearchCaseClassificationsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.search_case_classifications + ] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchCaseClassificationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CaseServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CaseServiceClient",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/pagers.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/pagers.py new file mode 100644 index 000000000000..c9a4d732cce1 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/pagers.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.support_v2.types import case, case_service + + +class ListCasesPager: + """A pager for iterating through ``list_cases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.ListCasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cases`` field. 
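+
+    For example, iteration might look like the following sketch (it assumes
+    ``client`` is an authenticated ``CaseServiceClient``)::
+
+        pager = client.list_cases(request={"parent": "projects/my-project"})
+        # Additional pages are fetched transparently during iteration.
+        for response in pager:
+            print(response)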
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListCases`` requests and continue to iterate + through the ``cases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.ListCasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., case_service.ListCasesResponse], + request: case_service.ListCasesRequest, + response: case_service.ListCasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.ListCasesRequest): + The initial request object. + response (google.cloud.support_v2.types.ListCasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = case_service.ListCasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[case_service.ListCasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[case.Case]: + for page in self.pages: + yield from page.cases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCasesAsyncPager: + """A pager for iterating through ``list_cases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.ListCasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``cases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCases`` requests and continue to iterate + through the ``cases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.ListCasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[case_service.ListCasesResponse]], + request: case_service.ListCasesRequest, + response: case_service.ListCasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.ListCasesRequest): + The initial request object. + response (google.cloud.support_v2.types.ListCasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
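+
+        For illustration, asynchronous iteration might look like the following
+        sketch (it assumes an authenticated ``CaseServiceAsyncClient`` used
+        inside a coroutine)::
+
+            pager = await client.list_cases(request={"parent": "projects/my-project"})
+            async for response in pager:
+                print(response)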
+ """ + self._method = method + self._request = case_service.ListCasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[case_service.ListCasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[case.Case]: + async def async_generator(): + async for page in self.pages: + for response in page.cases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchCasesPager: + """A pager for iterating through ``search_cases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.SearchCasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchCases`` requests and continue to iterate + through the ``cases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.SearchCasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., case_service.SearchCasesResponse], + request: case_service.SearchCasesRequest, + response: case_service.SearchCasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.SearchCasesRequest): + The initial request object. + response (google.cloud.support_v2.types.SearchCasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = case_service.SearchCasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[case_service.SearchCasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[case.Case]: + for page in self.pages: + yield from page.cases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchCasesAsyncPager: + """A pager for iterating through ``search_cases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.SearchCasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``cases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchCases`` requests and continue to iterate + through the ``cases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.SearchCasesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[case_service.SearchCasesResponse]], + request: case_service.SearchCasesRequest, + response: case_service.SearchCasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.SearchCasesRequest): + The initial request object. + response (google.cloud.support_v2.types.SearchCasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = case_service.SearchCasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[case_service.SearchCasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[case.Case]: + async def async_generator(): + async for page in self.pages: + for response in page.cases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchCaseClassificationsPager: + """A pager for iterating through ``search_case_classifications`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.SearchCaseClassificationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``case_classifications`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchCaseClassifications`` requests and continue to iterate + through the ``case_classifications`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.SearchCaseClassificationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., case_service.SearchCaseClassificationsResponse], + request: case_service.SearchCaseClassificationsRequest, + response: case_service.SearchCaseClassificationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.SearchCaseClassificationsRequest): + The initial request object. + response (google.cloud.support_v2.types.SearchCaseClassificationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
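+
+        For illustration (a sketch; it assumes an authenticated
+        ``CaseServiceClient``)::
+
+            pager = client.search_case_classifications(request={})
+            # Each yielded item is a CaseClassification.
+            for response in pager:
+                print(response)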
+ """ + self._method = method + self._request = case_service.SearchCaseClassificationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[case_service.SearchCaseClassificationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[case.CaseClassification]: + for page in self.pages: + yield from page.case_classifications + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchCaseClassificationsAsyncPager: + """A pager for iterating through ``search_case_classifications`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.SearchCaseClassificationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``case_classifications`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchCaseClassifications`` requests and continue to iterate + through the ``case_classifications`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.SearchCaseClassificationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[case_service.SearchCaseClassificationsResponse] + ], + request: case_service.SearchCaseClassificationsRequest, + response: case_service.SearchCaseClassificationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.SearchCaseClassificationsRequest): + The initial request object. + response (google.cloud.support_v2.types.SearchCaseClassificationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = case_service.SearchCaseClassificationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[case_service.SearchCaseClassificationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[case.CaseClassification]: + async def async_generator(): + async for page in self.pages: + for response in page.case_classifications: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/__init__.py new file mode 100644 index 000000000000..1093fd5e2565 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CaseServiceTransport +from .grpc import CaseServiceGrpcTransport +from .grpc_asyncio import CaseServiceGrpcAsyncIOTransport +from .rest import CaseServiceRestInterceptor, CaseServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CaseServiceTransport]] +_transport_registry["grpc"] = CaseServiceGrpcTransport +_transport_registry["grpc_asyncio"] = CaseServiceGrpcAsyncIOTransport +_transport_registry["rest"] = CaseServiceRestTransport + +__all__ = ( + "CaseServiceTransport", + "CaseServiceGrpcTransport", + "CaseServiceGrpcAsyncIOTransport", + "CaseServiceRestTransport", + "CaseServiceRestInterceptor", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/base.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/base.py new file mode 100644 index 000000000000..0e5f7935c607 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/base.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version +from google.cloud.support_v2.types import case +from google.cloud.support_v2.types import case as gcs_case +from google.cloud.support_v2.types import case_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CaseServiceTransport(abc.ABC): + """Abstract transport class for CaseService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudsupport.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
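+        # In short: an explicit ``credentials_file`` takes precedence and is
+        # loaded below; otherwise explicit ``credentials`` are used as given;
+        # and if neither is supplied, Application Default Credentials are
+        # resolved via ``google.auth.default()``.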
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_case: gapic_v1.method.wrap_method( + self.get_case, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_cases: gapic_v1.method.wrap_method( + self.list_cases, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.search_cases: gapic_v1.method.wrap_method( + self.search_cases, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_case: gapic_v1.method.wrap_method( + self.create_case, + default_timeout=60.0, + client_info=client_info, + ), + self.update_case: gapic_v1.method.wrap_method( + self.update_case, + default_timeout=60.0, + client_info=client_info, + ), + self.escalate_case: gapic_v1.method.wrap_method( + self.escalate_case, + default_timeout=60.0, + client_info=client_info, + ), + self.close_case: gapic_v1.method.wrap_method( + self.close_case, + default_timeout=60.0, + client_info=client_info, + ), + self.search_case_classifications: gapic_v1.method.wrap_method( + self.search_case_classifications, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_case( + self, + ) -> Callable[ + [case_service.GetCaseRequest], Union[case.Case, Awaitable[case.Case]] + ]: + raise NotImplementedError() + + @property + def list_cases( + self, + ) -> Callable[ + [case_service.ListCasesRequest], + Union[ + case_service.ListCasesResponse, Awaitable[case_service.ListCasesResponse] + ], + ]: + raise NotImplementedError() + + @property + def search_cases( + self, + ) -> Callable[ + [case_service.SearchCasesRequest], + Union[ + case_service.SearchCasesResponse, + Awaitable[case_service.SearchCasesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_case( + self, + ) -> Callable[ + [case_service.CreateCaseRequest], Union[gcs_case.Case, Awaitable[gcs_case.Case]] + ]: + raise NotImplementedError() + + @property + def update_case( + self, + ) -> Callable[ + [case_service.UpdateCaseRequest], Union[gcs_case.Case, Awaitable[gcs_case.Case]] + ]: + raise NotImplementedError() + + @property + def escalate_case( + self, + ) -> Callable[ + [case_service.EscalateCaseRequest], Union[case.Case, Awaitable[case.Case]] + ]: + raise NotImplementedError() + + @property + def close_case( + self, + ) -> Callable[ + [case_service.CloseCaseRequest], Union[case.Case, Awaitable[case.Case]] + ]: + raise NotImplementedError() + + @property + def search_case_classifications( + self, + ) -> Callable[ + [case_service.SearchCaseClassificationsRequest], + Union[ + case_service.SearchCaseClassificationsResponse, + Awaitable[case_service.SearchCaseClassificationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CaseServiceTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/grpc.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/grpc.py new file mode 100644 index 000000000000..ec32b37f66bc --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/grpc.py @@ -0,0 +1,461 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.support_v2.types import case +from google.cloud.support_v2.types import case as gcs_case +from google.cloud.support_v2.types import case_service + +from .base import DEFAULT_CLIENT_INFO, CaseServiceTransport + + +class CaseServiceGrpcTransport(CaseServiceTransport): + """gRPC backend transport for CaseService. + + A service to manage Google Cloud support cases. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudsupport.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "cloudsupport.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
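+
+        A minimal invocation might look like the following sketch (it assumes
+        Application Default Credentials are available in the environment)::
+
+            channel = CaseServiceGrpcTransport.create_channel()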
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_case(self) -> Callable[[case_service.GetCaseRequest], case.Case]: + r"""Return a callable for the get case method over gRPC. + + Retrieve the specified case. + + Returns: + Callable[[~.GetCaseRequest], + ~.Case]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_case" not in self._stubs: + self._stubs["get_case"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/GetCase", + request_serializer=case_service.GetCaseRequest.serialize, + response_deserializer=case.Case.deserialize, + ) + return self._stubs["get_case"] + + @property + def list_cases( + self, + ) -> Callable[[case_service.ListCasesRequest], case_service.ListCasesResponse]: + r"""Return a callable for the list cases method over gRPC. + + Retrieve all cases under the specified parent. + + Note: Listing cases under an Organization returns only the cases + directly parented by that organization. To retrieve all cases + under an organization, including cases parented by projects + under that organization, use ``cases.search``. + + Returns: + Callable[[~.ListCasesRequest], + ~.ListCasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_cases" not in self._stubs: + self._stubs["list_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/ListCases", + request_serializer=case_service.ListCasesRequest.serialize, + response_deserializer=case_service.ListCasesResponse.deserialize, + ) + return self._stubs["list_cases"] + + @property + def search_cases( + self, + ) -> Callable[[case_service.SearchCasesRequest], case_service.SearchCasesResponse]: + r"""Return a callable for the search cases method over gRPC. + + Search cases using the specified query. + + Returns: + Callable[[~.SearchCasesRequest], + ~.SearchCasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_cases" not in self._stubs: + self._stubs["search_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/SearchCases", + request_serializer=case_service.SearchCasesRequest.serialize, + response_deserializer=case_service.SearchCasesResponse.deserialize, + ) + return self._stubs["search_cases"] + + @property + def create_case(self) -> Callable[[case_service.CreateCaseRequest], gcs_case.Case]: + r"""Return a callable for the create case method over gRPC. + + Create a new case and associate it with the given Google Cloud + Resource. 
The case object must have the following fields set: + ``display_name``, ``description``, ``classification``, and + ``priority``. + + Returns: + Callable[[~.CreateCaseRequest], + ~.Case]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_case" not in self._stubs: + self._stubs["create_case"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/CreateCase", + request_serializer=case_service.CreateCaseRequest.serialize, + response_deserializer=gcs_case.Case.deserialize, + ) + return self._stubs["create_case"] + + @property + def update_case(self) -> Callable[[case_service.UpdateCaseRequest], gcs_case.Case]: + r"""Return a callable for the update case method over gRPC. + + Update the specified case. Only a subset of fields + can be updated. + + Returns: + Callable[[~.UpdateCaseRequest], + ~.Case]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_case" not in self._stubs: + self._stubs["update_case"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/UpdateCase", + request_serializer=case_service.UpdateCaseRequest.serialize, + response_deserializer=gcs_case.Case.deserialize, + ) + return self._stubs["update_case"] + + @property + def escalate_case(self) -> Callable[[case_service.EscalateCaseRequest], case.Case]: + r"""Return a callable for the escalate case method over gRPC. + + Escalate a case. Escalating a case will initiate the + Google Cloud Support escalation management process. + This operation is only available to certain Customer + Care tiers. Go to https://cloud.google.com/support and + look for 'Technical support escalations' in the feature + list to find out which tiers are able to perform + escalations. + + Returns: + Callable[[~.EscalateCaseRequest], + ~.Case]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "escalate_case" not in self._stubs: + self._stubs["escalate_case"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/EscalateCase", + request_serializer=case_service.EscalateCaseRequest.serialize, + response_deserializer=case.Case.deserialize, + ) + return self._stubs["escalate_case"] + + @property + def close_case(self) -> Callable[[case_service.CloseCaseRequest], case.Case]: + r"""Return a callable for the close case method over gRPC. + + Close the specified case. + + Returns: + Callable[[~.CloseCaseRequest], + ~.Case]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "close_case" not in self._stubs:
+            self._stubs["close_case"] = self.grpc_channel.unary_unary(
+                "/google.cloud.support.v2.CaseService/CloseCase",
+                request_serializer=case_service.CloseCaseRequest.serialize,
+                response_deserializer=case.Case.deserialize,
+            )
+        return self._stubs["close_case"]
+
+    @property
+    def search_case_classifications(
+        self,
+    ) -> Callable[
+        [case_service.SearchCaseClassificationsRequest],
+        case_service.SearchCaseClassificationsResponse,
+    ]:
+        r"""Return a callable for the search case classifications method over gRPC.
+
+        Retrieve valid classifications to be used when
+        creating a support case. The classifications are
+        hierarchical, with each classification containing all
+        levels of the hierarchy, separated by " > ". For example
+        "Technical Issue > Compute > Compute Engine".
+
+        Returns:
+            Callable[[~.SearchCaseClassificationsRequest],
+                    ~.SearchCaseClassificationsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "search_case_classifications" not in self._stubs:
+            self._stubs["search_case_classifications"] = self.grpc_channel.unary_unary(
+                "/google.cloud.support.v2.CaseService/SearchCaseClassifications",
+                request_serializer=case_service.SearchCaseClassificationsRequest.serialize,
+                response_deserializer=case_service.SearchCaseClassificationsResponse.deserialize,
+            )
+        return self._stubs["search_case_classifications"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("CaseServiceGrpcTransport",)
diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/grpc_asyncio.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..be1da8409c57
--- /dev/null
+++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/grpc_asyncio.py
@@ -0,0 +1,472 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
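The synchronous transport just closed builds each RPC callable lazily: the first access to a property such as get_case creates the stub and caches it in _stubs, so later accesses reuse the same channel-backed callable. A minimal, self-contained sketch of that caching pattern follows; the StubCachingTransport name and the bare unary_unary call without serializers are illustrative simplifications, not part of this patch:

    from typing import Callable, Dict

    import grpc


    class StubCachingTransport:
        """Sketch of the lazy stub-caching pattern used by the generated transport."""

        def __init__(self, channel: grpc.Channel):
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        @property
        def get_case(self) -> Callable:
            # Build the callable on first access, then reuse the cached stub.
            # Request/response serializers are omitted here for brevity.
            if "get_case" not in self._stubs:
                self._stubs["get_case"] = self._channel.unary_unary(
                    "/google.cloud.support.v2.CaseService/GetCase"
                )
            return self._stubs["get_case"]

Lazy creation means the transport only materializes stubs for the RPCs a caller actually uses, which keeps channel setup cheap.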
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.support_v2.types import case
+from google.cloud.support_v2.types import case as gcs_case
+from google.cloud.support_v2.types import case_service
+
+from .base import DEFAULT_CLIENT_INFO, CaseServiceTransport
+from .grpc import CaseServiceGrpcTransport
+
+
+class CaseServiceGrpcAsyncIOTransport(CaseServiceTransport):
+    """gRPC AsyncIO backend transport for CaseService.
+
+    A service to manage Google Cloud support cases.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "cloudsupport.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudsupport.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_case(self) -> Callable[[case_service.GetCaseRequest], Awaitable[case.Case]]: + r"""Return a callable for the get case method over gRPC. + + Retrieve the specified case. + + Returns: + Callable[[~.GetCaseRequest], + Awaitable[~.Case]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_case" not in self._stubs: + self._stubs["get_case"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/GetCase", + request_serializer=case_service.GetCaseRequest.serialize, + response_deserializer=case.Case.deserialize, + ) + return self._stubs["get_case"] + + @property + def list_cases( + self, + ) -> Callable[ + [case_service.ListCasesRequest], Awaitable[case_service.ListCasesResponse] + ]: + r"""Return a callable for the list cases method over gRPC. + + Retrieve all cases under the specified parent. + + Note: Listing cases under an Organization returns only the cases + directly parented by that organization. To retrieve all cases + under an organization, including cases parented by projects + under that organization, use ``cases.search``. + + Returns: + Callable[[~.ListCasesRequest], + Awaitable[~.ListCasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_cases" not in self._stubs: + self._stubs["list_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/ListCases", + request_serializer=case_service.ListCasesRequest.serialize, + response_deserializer=case_service.ListCasesResponse.deserialize, + ) + return self._stubs["list_cases"] + + @property + def search_cases( + self, + ) -> Callable[ + [case_service.SearchCasesRequest], Awaitable[case_service.SearchCasesResponse] + ]: + r"""Return a callable for the search cases method over gRPC. + + Search cases using the specified query. + + Returns: + Callable[[~.SearchCasesRequest], + Awaitable[~.SearchCasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_cases" not in self._stubs: + self._stubs["search_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/SearchCases", + request_serializer=case_service.SearchCasesRequest.serialize, + response_deserializer=case_service.SearchCasesResponse.deserialize, + ) + return self._stubs["search_cases"] + + @property + def create_case( + self, + ) -> Callable[[case_service.CreateCaseRequest], Awaitable[gcs_case.Case]]: + r"""Return a callable for the create case method over gRPC. + + Create a new case and associate it with the given Google Cloud + Resource. The case object must have the following fields set: + ``display_name``, ``description``, ``classification``, and + ``priority``. + + Returns: + Callable[[~.CreateCaseRequest], + Awaitable[~.Case]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_case" not in self._stubs: + self._stubs["create_case"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/CreateCase", + request_serializer=case_service.CreateCaseRequest.serialize, + response_deserializer=gcs_case.Case.deserialize, + ) + return self._stubs["create_case"] + + @property + def update_case( + self, + ) -> Callable[[case_service.UpdateCaseRequest], Awaitable[gcs_case.Case]]: + r"""Return a callable for the update case method over gRPC. + + Update the specified case. Only a subset of fields + can be updated. + + Returns: + Callable[[~.UpdateCaseRequest], + Awaitable[~.Case]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_case" not in self._stubs: + self._stubs["update_case"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/UpdateCase", + request_serializer=case_service.UpdateCaseRequest.serialize, + response_deserializer=gcs_case.Case.deserialize, + ) + return self._stubs["update_case"] + + @property + def escalate_case( + self, + ) -> Callable[[case_service.EscalateCaseRequest], Awaitable[case.Case]]: + r"""Return a callable for the escalate case method over gRPC. + + Escalate a case. Escalating a case will initiate the + Google Cloud Support escalation management process. 
+        This operation is only available to certain Customer
+        Care tiers. Go to https://cloud.google.com/support and
+        look for 'Technical support escalations' in the feature
+        list to find out which tiers are able to perform
+        escalations.
+
+        Returns:
+            Callable[[~.EscalateCaseRequest],
+                    Awaitable[~.Case]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "escalate_case" not in self._stubs:
+            self._stubs["escalate_case"] = self.grpc_channel.unary_unary(
+                "/google.cloud.support.v2.CaseService/EscalateCase",
+                request_serializer=case_service.EscalateCaseRequest.serialize,
+                response_deserializer=case.Case.deserialize,
+            )
+        return self._stubs["escalate_case"]
+
+    @property
+    def close_case(
+        self,
+    ) -> Callable[[case_service.CloseCaseRequest], Awaitable[case.Case]]:
+        r"""Return a callable for the close case method over gRPC.
+
+        Close the specified case.
+
+        Returns:
+            Callable[[~.CloseCaseRequest],
+                    Awaitable[~.Case]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "close_case" not in self._stubs:
+            self._stubs["close_case"] = self.grpc_channel.unary_unary(
+                "/google.cloud.support.v2.CaseService/CloseCase",
+                request_serializer=case_service.CloseCaseRequest.serialize,
+                response_deserializer=case.Case.deserialize,
+            )
+        return self._stubs["close_case"]
+
+    @property
+    def search_case_classifications(
+        self,
+    ) -> Callable[
+        [case_service.SearchCaseClassificationsRequest],
+        Awaitable[case_service.SearchCaseClassificationsResponse],
+    ]:
+        r"""Return a callable for the search case classifications method over gRPC.
+
+        Retrieve valid classifications to be used when
+        creating a support case. The classifications are
+        hierarchical, with each classification containing all
+        levels of the hierarchy, separated by " > ". For example
+        "Technical Issue > Compute > Compute Engine".
+
+        Returns:
+            Callable[[~.SearchCaseClassificationsRequest],
+                    Awaitable[~.SearchCaseClassificationsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "search_case_classifications" not in self._stubs: + self._stubs["search_case_classifications"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CaseService/SearchCaseClassifications", + request_serializer=case_service.SearchCaseClassificationsRequest.serialize, + response_deserializer=case_service.SearchCaseClassificationsResponse.deserialize, + ) + return self._stubs["search_case_classifications"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("CaseServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/rest.py b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/rest.py new file mode 100644 index 000000000000..6719c55a2575 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/case_service/transports/rest.py @@ -0,0 +1,1225 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.support_v2.types import case +from google.cloud.support_v2.types import case as gcs_case +from google.cloud.support_v2.types import case_service + +from .base import CaseServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CaseServiceRestInterceptor: + """Interceptor for CaseService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CaseServiceRestTransport. + + .. 
code-block:: python
+        class MyCustomCaseServiceInterceptor(CaseServiceRestInterceptor):
+            def pre_close_case(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_close_case(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_case(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_case(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_escalate_case(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_escalate_case(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_case(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_case(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_cases(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_cases(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_search_case_classifications(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_search_case_classifications(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_search_cases(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_search_cases(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_case(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_case(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = CaseServiceRestTransport(interceptor=MyCustomCaseServiceInterceptor())
+        client = CaseServiceClient(transport=transport)
+
+
+    """
+
+    def pre_close_case(
+        self,
+        request: case_service.CloseCaseRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[case_service.CloseCaseRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for close_case
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CaseService server.
+        """
+        return request, metadata
+
+    def post_close_case(self, response: case.Case) -> case.Case:
+        """Post-rpc interceptor for close_case
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CaseService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_create_case(
+        self,
+        request: case_service.CreateCaseRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[case_service.CreateCaseRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_case
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CaseService server.
+        """
+        return request, metadata
+
+    def post_create_case(self, response: gcs_case.Case) -> gcs_case.Case:
+        """Post-rpc interceptor for create_case
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CaseService server but before
+        it is returned to user code.
+ """ + return response + + def pre_escalate_case( + self, + request: case_service.EscalateCaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[case_service.EscalateCaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for escalate_case + + Override in a subclass to manipulate the request or metadata + before they are sent to the CaseService server. + """ + return request, metadata + + def post_escalate_case(self, response: case.Case) -> case.Case: + """Post-rpc interceptor for escalate_case + + Override in a subclass to manipulate the response + after it is returned by the CaseService server but before + it is returned to user code. + """ + return response + + def pre_get_case( + self, request: case_service.GetCaseRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[case_service.GetCaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_case + + Override in a subclass to manipulate the request or metadata + before they are sent to the CaseService server. + """ + return request, metadata + + def post_get_case(self, response: case.Case) -> case.Case: + """Post-rpc interceptor for get_case + + Override in a subclass to manipulate the response + after it is returned by the CaseService server but before + it is returned to user code. + """ + return response + + def pre_list_cases( + self, + request: case_service.ListCasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[case_service.ListCasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_cases + + Override in a subclass to manipulate the request or metadata + before they are sent to the CaseService server. + """ + return request, metadata + + def post_list_cases( + self, response: case_service.ListCasesResponse + ) -> case_service.ListCasesResponse: + """Post-rpc interceptor for list_cases + + Override in a subclass to manipulate the response + after it is returned by the CaseService server but before + it is returned to user code. + """ + return response + + def pre_search_case_classifications( + self, + request: case_service.SearchCaseClassificationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + case_service.SearchCaseClassificationsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for search_case_classifications + + Override in a subclass to manipulate the request or metadata + before they are sent to the CaseService server. + """ + return request, metadata + + def post_search_case_classifications( + self, response: case_service.SearchCaseClassificationsResponse + ) -> case_service.SearchCaseClassificationsResponse: + """Post-rpc interceptor for search_case_classifications + + Override in a subclass to manipulate the response + after it is returned by the CaseService server but before + it is returned to user code. + """ + return response + + def pre_search_cases( + self, + request: case_service.SearchCasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[case_service.SearchCasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_cases + + Override in a subclass to manipulate the request or metadata + before they are sent to the CaseService server. + """ + return request, metadata + + def post_search_cases( + self, response: case_service.SearchCasesResponse + ) -> case_service.SearchCasesResponse: + """Post-rpc interceptor for search_cases + + Override in a subclass to manipulate the response + after it is returned by the CaseService server but before + it is returned to user code. 
+        """
+        return response
+
+    def pre_update_case(
+        self,
+        request: case_service.UpdateCaseRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[case_service.UpdateCaseRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_case
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CaseService server.
+        """
+        return request, metadata
+
+    def post_update_case(self, response: gcs_case.Case) -> gcs_case.Case:
+        """Post-rpc interceptor for update_case
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CaseService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class CaseServiceRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: CaseServiceRestInterceptor
+
+
+class CaseServiceRestTransport(CaseServiceTransport):
+    """REST backend transport for CaseService.
+
+    A service to manage Google Cloud support cases.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudsupport.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[CaseServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CaseServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CloseCase(CaseServiceRestStub):
+        def __hash__(self):
+            return hash("CloseCase")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: case_service.CloseCaseRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> case.Case:
+            r"""Call the close case method over HTTP.
+
+            Args:
+                request (~.case_service.CloseCaseRequest):
+                    The request object. The request message for the CloseCase
+                    endpoint.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.case.Case:
+                    A support case.
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v2/{name=projects/*/cases/*}:close",
+                    "body": "*",
+                },
+                {
+                    "method": "post",
+                    "uri": "/v2/{name=organizations/*/cases/*}:close",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_close_case(request, metadata)
+            pb_request = case_service.CloseCaseRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = case.Case() + pb_resp = case.Case.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_close_case(resp) + return resp + + class _CreateCase(CaseServiceRestStub): + def __hash__(self): + return hash("CreateCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: case_service.CreateCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_case.Case: + r"""Call the create case method over HTTP. + + Args: + request (~.case_service.CreateCaseRequest): + The request object. The request message for the + CreateCase endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcs_case.Case: + A support case. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/cases", + "body": "case", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*}/cases", + "body": "case", + }, + ] + request, metadata = self._interceptor.pre_create_case(request, metadata) + pb_request = case_service.CreateCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcs_case.Case() + pb_resp = gcs_case.Case.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_case(resp) + return resp + + class _EscalateCase(CaseServiceRestStub): + def __hash__(self): + return hash("EscalateCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: case_service.EscalateCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case.Case: + r"""Call the escalate case method over HTTP. + + Args: + request (~.case_service.EscalateCaseRequest): + The request object. The request message for the + EscalateCase endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.case.Case: + A support case. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/cases/*}:escalate", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{name=organizations/*/cases/*}:escalate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_escalate_case(request, metadata) + pb_request = case_service.EscalateCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = case.Case() + pb_resp = case.Case.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_escalate_case(resp) + return resp + + class _GetCase(CaseServiceRestStub): + def __hash__(self): + return hash("GetCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: case_service.GetCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case.Case: + r"""Call the get case method over HTTP. + + Args: + request (~.case_service.GetCaseRequest): + The request object. The request message for the GetCase + endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.case.Case: + A support case. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/cases/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/cases/*}", + }, + ] + request, metadata = self._interceptor.pre_get_case(request, metadata) + pb_request = case_service.GetCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = case.Case() + pb_resp = case.Case.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_case(resp) + return resp + + class _ListCases(CaseServiceRestStub): + def __hash__(self): + return hash("ListCases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: case_service.ListCasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case_service.ListCasesResponse: + r"""Call the list cases method over HTTP. + + Args: + request (~.case_service.ListCasesRequest): + The request object. 
The request message for the ListCases + endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.case_service.ListCasesResponse: + The response message for the + ListCases endpoint. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*}/cases", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/cases", + }, + ] + request, metadata = self._interceptor.pre_list_cases(request, metadata) + pb_request = case_service.ListCasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = case_service.ListCasesResponse() + pb_resp = case_service.ListCasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cases(resp) + return resp + + class _SearchCaseClassifications(CaseServiceRestStub): + def __hash__(self): + return hash("SearchCaseClassifications") + + def __call__( + self, + request: case_service.SearchCaseClassificationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case_service.SearchCaseClassificationsResponse: + r"""Call the search case + classifications method over HTTP. + + Args: + request (~.case_service.SearchCaseClassificationsRequest): + The request object. The request message for + SearchCaseClassifications endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.case_service.SearchCaseClassificationsResponse: + The response message for + SearchCaseClassifications endpoint. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/caseClassifications:search", + }, + ] + request, metadata = self._interceptor.pre_search_case_classifications( + request, metadata + ) + pb_request = case_service.SearchCaseClassificationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = case_service.SearchCaseClassificationsResponse() + pb_resp = case_service.SearchCaseClassificationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_case_classifications(resp) + return resp + + class _SearchCases(CaseServiceRestStub): + def __hash__(self): + return hash("SearchCases") + + def __call__( + self, + request: case_service.SearchCasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> case_service.SearchCasesResponse: + r"""Call the search cases method over HTTP. + + Args: + request (~.case_service.SearchCasesRequest): + The request object. The request message for the + SearchCases endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.case_service.SearchCasesResponse: + The response message for the + SearchCases endpoint. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*}/cases:search", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/cases:search", + }, + ] + request, metadata = self._interceptor.pre_search_cases(request, metadata) + pb_request = case_service.SearchCasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = case_service.SearchCasesResponse() + pb_resp = case_service.SearchCasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_cases(resp) + return resp + + class _UpdateCase(CaseServiceRestStub): + def __hash__(self): + return hash("UpdateCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: case_service.UpdateCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_case.Case: + r"""Call the update case method over HTTP. + + Args: + request (~.case_service.UpdateCaseRequest): + The request object. The request message for the + UpdateCase endpoint + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcs_case.Case: + A support case. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{case.name=projects/*/cases/*}", + "body": "case", + }, + { + "method": "patch", + "uri": "/v2/{case.name=organizations/*/cases/*}", + "body": "case", + }, + ] + request, metadata = self._interceptor.pre_update_case(request, metadata) + pb_request = case_service.UpdateCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcs_case.Case() + pb_resp = gcs_case.Case.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_case(resp) + return resp + + @property + def close_case(self) -> Callable[[case_service.CloseCaseRequest], case.Case]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CloseCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_case(self) -> Callable[[case_service.CreateCaseRequest], gcs_case.Case]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def escalate_case(self) -> Callable[[case_service.EscalateCaseRequest], case.Case]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EscalateCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_case(self) -> Callable[[case_service.GetCaseRequest], case.Case]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cases( + self, + ) -> Callable[[case_service.ListCasesRequest], case_service.ListCasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCases(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_case_classifications( + self, + ) -> Callable[ + [case_service.SearchCaseClassificationsRequest], + case_service.SearchCaseClassificationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchCaseClassifications(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_cases( + self, + ) -> Callable[[case_service.SearchCasesRequest], case_service.SearchCasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchCases(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_case(self) -> Callable[[case_service.UpdateCaseRequest], gcs_case.Case]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CaseServiceRestTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/__init__.py new file mode 100644 index 000000000000..99572e7a6da7 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
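For orientation, the REST transport completed above is what a caller gets by passing the ``rest`` label when constructing a client. A minimal usage sketch, assuming the public ``support_v2`` surface added in this package ("projects/my-project" is a placeholder):

.. code-block:: python

    from google.cloud import support_v2

    # Select the REST transport by its registry label; omitting the
    # argument would pick the default gRPC transport instead.
    client = support_v2.CaseServiceClient(transport="rest")

    request = support_v2.SearchCasesRequest(parent="projects/my-project")

    # search_cases returns a pager that fetches further pages lazily.
    for case in client.search_cases(request=request):
        print(case.name)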
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CommentServiceAsyncClient +from .client import CommentServiceClient + +__all__ = ( + "CommentServiceClient", + "CommentServiceAsyncClient", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/async_client.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/async_client.py new file mode 100644 index 000000000000..6485eccabed5 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/async_client.py @@ -0,0 +1,476 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.support_v2.services.comment_service import pagers +from google.cloud.support_v2.types import actor +from google.cloud.support_v2.types import comment +from google.cloud.support_v2.types import comment as gcs_comment +from google.cloud.support_v2.types import comment_service + +from .client import CommentServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, CommentServiceTransport +from .transports.grpc_asyncio import CommentServiceGrpcAsyncIOTransport + + +class CommentServiceAsyncClient: + """A service to manage comments on cases.""" + + _client: CommentServiceClient + + DEFAULT_ENDPOINT = CommentServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CommentServiceClient.DEFAULT_MTLS_ENDPOINT + + case_path = staticmethod(CommentServiceClient.case_path) + parse_case_path = staticmethod(CommentServiceClient.parse_case_path) + comment_path = staticmethod(CommentServiceClient.comment_path) + parse_comment_path = staticmethod(CommentServiceClient.parse_comment_path) + common_billing_account_path = staticmethod( + CommentServiceClient.common_billing_account_path + ) + 
parse_common_billing_account_path = staticmethod(
+ CommentServiceClient.parse_common_billing_account_path
+ )
+ common_folder_path = staticmethod(CommentServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ CommentServiceClient.parse_common_folder_path
+ )
+ common_organization_path = staticmethod(
+ CommentServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ CommentServiceClient.parse_common_organization_path
+ )
+ common_project_path = staticmethod(CommentServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ CommentServiceClient.parse_common_project_path
+ )
+ common_location_path = staticmethod(CommentServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ CommentServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ CommentServiceAsyncClient: The constructed client.
+ """
+ return CommentServiceClient.from_service_account_info.__func__(CommentServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ CommentServiceAsyncClient: The constructed client.
+ """
+ return CommentServiceClient.from_service_account_file.__func__(CommentServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + return CommentServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CommentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CommentServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CommentServiceClient).get_transport_class, type(CommentServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CommentServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the comment service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CommentServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CommentServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_comments( + self, + request: Optional[Union[comment_service.ListCommentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCommentsAsyncPager: + r"""Retrieve all Comments associated with the Case + object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_list_comments(): + # Create a client + client = support_v2.CommentServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_comments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.ListCommentsRequest, dict]]): + The request object. The request message for the + ListComments endpoint. + parent (:class:`str`): + Required. The resource name of Case + object for which comments should be + listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.comment_service.pagers.ListCommentsAsyncPager: + The response message for the + ListComments endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = comment_service.ListCommentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_comments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCommentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
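Since ``list_comments`` wraps its result in ``ListCommentsAsyncPager``, callers iterate with ``async for`` and additional pages are fetched transparently. A sketch of the calling side (the case resource name is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import support_v2

    async def main():
        client = support_v2.CommentServiceAsyncClient()
        request = support_v2.ListCommentsRequest(
            parent="projects/my-project/cases/123",  # placeholder
        )

        # Awaiting the call yields the pager; iterating it pulls
        # further pages on demand via next_page_token.
        pager = await client.list_comments(request=request)
        async for comment in pager:
            print(comment.body)

    asyncio.run(main())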
+ return response + + async def create_comment( + self, + request: Optional[Union[comment_service.CreateCommentRequest, dict]] = None, + *, + parent: Optional[str] = None, + comment: Optional[gcs_comment.Comment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_comment.Comment: + r"""Add a new comment to the specified Case. + The comment object must have the following fields set: + body. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + async def sample_create_comment(): + # Create a client + client = support_v2.CommentServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.CreateCommentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.support_v2.types.CreateCommentRequest, dict]]): + The request object. The request message for CreateComment + endpoint. + parent (:class:`str`): + Required. The resource name of Case + to which this comment should be added. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + comment (:class:`google.cloud.support_v2.types.Comment`): + Required. The Comment object to be + added to this Case. + + This corresponds to the ``comment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Comment: + A comment associated with a support + case. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, comment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = comment_service.CreateCommentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if comment is not None: + request.comment = comment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_comment, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CommentServiceAsyncClient",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/client.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/client.py new file mode 100644 index 000000000000..f9ae0a8547d7 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/client.py @@ -0,0 +1,718 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.support_v2.services.comment_service import pagers +from google.cloud.support_v2.types import actor +from google.cloud.support_v2.types import comment +from google.cloud.support_v2.types import comment as gcs_comment +from google.cloud.support_v2.types import comment_service + +from .transports.base import DEFAULT_CLIENT_INFO, CommentServiceTransport +from .transports.grpc import CommentServiceGrpcTransport +from .transports.grpc_asyncio import CommentServiceGrpcAsyncIOTransport +from .transports.rest import CommentServiceRestTransport + + +class CommentServiceClientMeta(type): + """Metaclass for the CommentService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[CommentServiceTransport]] + _transport_registry["grpc"] = CommentServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CommentServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CommentServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CommentServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CommentServiceClient(metaclass=CommentServiceClientMeta): + """A service to manage comments on cases.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudsupport.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CommentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CommentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CommentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CommentServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def case_path( + organization: str, + case: str, + ) -> str: + """Returns a fully-qualified case string.""" + return "organizations/{organization}/cases/{case}".format( + organization=organization, + case=case, + ) + + @staticmethod + def parse_case_path(path: str) -> Dict[str, str]: + """Parses a case path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/cases/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def comment_path( + organization: str, + case: str, + comment: str, + ) -> str: + """Returns a fully-qualified comment string.""" + return "organizations/{organization}/cases/{case}/comments/{comment}".format( + organization=organization, + case=case, + comment=comment, + ) + + @staticmethod + def parse_comment_path(path: str) -> Dict[str, str]: + """Parses a comment path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/cases/(?P.+?)/comments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Figure out the client cert source to use.
+ client_cert_source = None
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ client_cert_source = client_options.client_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = cls.DEFAULT_ENDPOINT
+
+ return api_endpoint, client_cert_source
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Optional[Union[str, CommentServiceTransport]] = None,
+ client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the comment service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, CommentServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CommentServiceTransport): + # transport is a CommentServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_comments( + self, + request: Optional[Union[comment_service.ListCommentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCommentsPager: + r"""Retrieve all Comments associated with the Case + object. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_list_comments(): + # Create a client + client = support_v2.CommentServiceClient() + + # Initialize request argument(s) + request = support_v2.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_comments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.support_v2.types.ListCommentsRequest, dict]): + The request object. The request message for the + ListComments endpoint. + parent (str): + Required. The resource name of Case + object for which comments should be + listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.services.comment_service.pagers.ListCommentsPager: + The response message for the + ListComments endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a comment_service.ListCommentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, comment_service.ListCommentsRequest): + request = comment_service.ListCommentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_comments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCommentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
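As the flattened-parameter check above enforces, ``parent`` can be passed directly instead of a request object, but never alongside one. A sync sketch with a placeholder resource name:

.. code-block:: python

    from google.cloud import support_v2

    client = support_v2.CommentServiceClient()

    # Passing the flattened ``parent`` argument; combining it with a
    # ``request`` object would raise ValueError.
    for comment in client.list_comments(parent="projects/my-project/cases/123"):
        print(comment.body)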
+ return response + + def create_comment( + self, + request: Optional[Union[comment_service.CreateCommentRequest, dict]] = None, + *, + parent: Optional[str] = None, + comment: Optional[gcs_comment.Comment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_comment.Comment: + r"""Add a new comment to the specified Case. + The comment object must have the following fields set: + body. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import support_v2 + + def sample_create_comment(): + # Create a client + client = support_v2.CommentServiceClient() + + # Initialize request argument(s) + request = support_v2.CreateCommentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_comment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.support_v2.types.CreateCommentRequest, dict]): + The request object. The request message for CreateComment + endpoint. + parent (str): + Required. The resource name of Case + to which this comment should be added. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + comment (google.cloud.support_v2.types.Comment): + Required. The Comment object to be + added to this Case. + + This corresponds to the ``comment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.support_v2.types.Comment: + A comment associated with a support + case. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, comment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a comment_service.CreateCommentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, comment_service.CreateCommentRequest): + request = comment_service.CreateCommentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if comment is not None: + request.comment = comment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_comment] + + # Certain fields should be provided within the metadata header; + # add these here. 
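The same flattened-argument pattern applies to ``create_comment``; per the docstring above, the comment's ``body`` field must be set. A sketch, assuming ``Comment`` is re-exported at the package root as GAPIC packages normally do:

.. code-block:: python

    from google.cloud import support_v2

    client = support_v2.CommentServiceClient()

    response = client.create_comment(
        parent="projects/my-project/cases/123",  # placeholder case name
        comment=support_v2.Comment(body="Attached the requested logs."),
    )
    print(response.name)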
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CommentServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CommentServiceClient",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/pagers.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/pagers.py new file mode 100644 index 000000000000..667047db70a4 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.support_v2.types import comment, comment_service + + +class ListCommentsPager: + """A pager for iterating through ``list_comments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.ListCommentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``comments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListComments`` requests and continue to iterate + through the ``comments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.ListCommentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., comment_service.ListCommentsResponse], + request: comment_service.ListCommentsRequest, + response: comment_service.ListCommentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.ListCommentsRequest): + The initial request object. + response (google.cloud.support_v2.types.ListCommentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = comment_service.ListCommentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[comment_service.ListCommentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[comment.Comment]: + for page in self.pages: + yield from page.comments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCommentsAsyncPager: + """A pager for iterating through ``list_comments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.support_v2.types.ListCommentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``comments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListComments`` requests and continue to iterate + through the ``comments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.support_v2.types.ListCommentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[comment_service.ListCommentsResponse]], + request: comment_service.ListCommentsRequest, + response: comment_service.ListCommentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.support_v2.types.ListCommentsRequest): + The initial request object. + response (google.cloud.support_v2.types.ListCommentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = comment_service.ListCommentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[comment_service.ListCommentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[comment.Comment]: + async def async_generator(): + async for page in self.pages: + for response in page.comments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/__init__.py new file mode 100644 index 000000000000..5eec969d818a --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CommentServiceTransport +from .grpc import CommentServiceGrpcTransport +from .grpc_asyncio import CommentServiceGrpcAsyncIOTransport +from .rest import CommentServiceRestInterceptor, CommentServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CommentServiceTransport]] +_transport_registry["grpc"] = CommentServiceGrpcTransport +_transport_registry["grpc_asyncio"] = CommentServiceGrpcAsyncIOTransport +_transport_registry["rest"] = CommentServiceRestTransport + +__all__ = ( + "CommentServiceTransport", + "CommentServiceGrpcTransport", + "CommentServiceGrpcAsyncIOTransport", + "CommentServiceRestTransport", + "CommentServiceRestInterceptor", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/base.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/base.py new file mode 100644 index 000000000000..520f80635862 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/base.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.support_v2 import gapic_version as package_version +from google.cloud.support_v2.types import comment as gcs_comment +from google.cloud.support_v2.types import comment_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CommentServiceTransport(abc.ABC): + """Abstract transport class for CommentService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudsupport.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
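When neither ``credentials`` nor ``credentials_file`` is supplied, the branch that follows resolves Application Default Credentials with the transport's ``AUTH_SCOPES`` as default scopes, roughly equivalent to this sketch:

.. code-block:: python

    import google.auth

    # Approximates the fallback below: ADC, with cloud-platform offered
    # as the default scope exactly as scopes_kwargs passes it through.
    credentials, project_id = google.auth.default(
        default_scopes=("https://www.googleapis.com/auth/cloud-platform",)
    )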
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_comments: gapic_v1.method.wrap_method( + self.list_comments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_comment: gapic_v1.method.wrap_method( + self.create_comment, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_comments( + self, + ) -> Callable[ + [comment_service.ListCommentsRequest], + Union[ + comment_service.ListCommentsResponse, + Awaitable[comment_service.ListCommentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_comment( + self, + ) -> Callable[ + [comment_service.CreateCommentRequest], + Union[gcs_comment.Comment, Awaitable[gcs_comment.Comment]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CommentServiceTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/grpc.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/grpc.py new file mode 100644 index 000000000000..2f01012866a9 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/grpc.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
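``_prep_wrapped_messages`` above gives ``list_comments`` a default policy: exponential backoff from 1.0s to 10.0s with multiplier 1.3, retrying only ``ServiceUnavailable``, under a 60s deadline. Callers may override it per call; a sketch with illustrative values:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import support_v2

    client = support_v2.CommentServiceClient()

    # Widen the default policy: also retry DeadlineExceeded, and allow
    # a longer overall deadline than the wrapped default of 60 seconds.
    custom_retry = retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.ServiceUnavailable,
            core_exceptions.DeadlineExceeded,
        ),
        deadline=120.0,
    )

    for comment in client.list_comments(
        parent="projects/my-project/cases/123",  # placeholder
        retry=custom_retry,
        timeout=120.0,
    ):
        print(comment.body)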
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.support_v2.types import comment as gcs_comment +from google.cloud.support_v2.types import comment_service + +from .base import DEFAULT_CLIENT_INFO, CommentServiceTransport + + +class CommentServiceGrpcTransport(CommentServiceTransport): + """gRPC backend transport for CommentService. + + A service to manage comments on cases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_comments( + self, + ) -> Callable[ + [comment_service.ListCommentsRequest], comment_service.ListCommentsResponse + ]: + r"""Return a callable for the list comments method over gRPC. + + Retrieve all Comments associated with the Case + object. + + Returns: + Callable[[~.ListCommentsRequest], + ~.ListCommentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_comments" not in self._stubs: + self._stubs["list_comments"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CommentService/ListComments", + request_serializer=comment_service.ListCommentsRequest.serialize, + response_deserializer=comment_service.ListCommentsResponse.deserialize, + ) + return self._stubs["list_comments"] + + @property + def create_comment( + self, + ) -> Callable[[comment_service.CreateCommentRequest], gcs_comment.Comment]: + r"""Return a callable for the create comment method over gRPC. + + Add a new comment to the specified Case. + The comment object must have the following fields set: + body. + + Returns: + Callable[[~.CreateCommentRequest], + ~.Comment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_comment" not in self._stubs: + self._stubs["create_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CommentService/CreateComment", + request_serializer=comment_service.CreateCommentRequest.serialize, + response_deserializer=gcs_comment.Comment.deserialize, + ) + return self._stubs["create_comment"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CommentServiceGrpcTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/grpc_asyncio.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a0996c42986f --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/grpc_asyncio.py @@ -0,0 +1,299 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.support_v2.types import comment as gcs_comment +from google.cloud.support_v2.types import comment_service + +from .base import DEFAULT_CLIENT_INFO, CommentServiceTransport +from .grpc import CommentServiceGrpcTransport + + +class CommentServiceGrpcAsyncIOTransport(CommentServiceTransport): + """gRPC AsyncIO backend transport for CommentService. + + A service to manage comments on cases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_comments( + self, + ) -> Callable[ + [comment_service.ListCommentsRequest], + Awaitable[comment_service.ListCommentsResponse], + ]: + r"""Return a callable for the list comments method over gRPC. + + Retrieve all Comments associated with the Case + object. + + Returns: + Callable[[~.ListCommentsRequest], + Awaitable[~.ListCommentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_comments" not in self._stubs: + self._stubs["list_comments"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CommentService/ListComments", + request_serializer=comment_service.ListCommentsRequest.serialize, + response_deserializer=comment_service.ListCommentsResponse.deserialize, + ) + return self._stubs["list_comments"] + + @property + def create_comment( + self, + ) -> Callable[ + [comment_service.CreateCommentRequest], Awaitable[gcs_comment.Comment] + ]: + r"""Return a callable for the create comment method over gRPC. + + Add a new comment to the specified Case. + The comment object must have the following fields set: + body. + + Returns: + Callable[[~.CreateCommentRequest], + Awaitable[~.Comment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_comment" not in self._stubs: + self._stubs["create_comment"] = self.grpc_channel.unary_unary( + "/google.cloud.support.v2.CommentService/CreateComment", + request_serializer=comment_service.CreateCommentRequest.serialize, + response_deserializer=gcs_comment.Comment.deserialize, + ) + return self._stubs["create_comment"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("CommentServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/rest.py b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/rest.py new file mode 100644 index 000000000000..5f563b51b425 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/services/comment_service/transports/rest.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.support_v2.types import comment as gcs_comment +from google.cloud.support_v2.types import comment_service + +from .base import CommentServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CommentServiceRestInterceptor: + """Interceptor for CommentService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CommentServiceRestTransport. + + .. code-block:: python + class MyCustomCommentServiceInterceptor(CommentServiceRestInterceptor): + def pre_create_comment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_comment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_comments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_comments(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CommentServiceRestTransport(interceptor=MyCustomCommentServiceInterceptor()) + client = CommentServiceClient(transport=transport) + + + """ + + def pre_create_comment( + self, + request: comment_service.CreateCommentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[comment_service.CreateCommentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_comment + + Override in a subclass to manipulate the request or metadata + before they are sent to the CommentService server. + """ + return request, metadata + + def post_create_comment(self, response: gcs_comment.Comment) -> gcs_comment.Comment: + """Post-rpc interceptor for create_comment + + Override in a subclass to manipulate the response + after it is returned by the CommentService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_comments( + self, + request: comment_service.ListCommentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[comment_service.ListCommentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_comments + + Override in a subclass to manipulate the request or metadata + before they are sent to the CommentService server. + """ + return request, metadata + + def post_list_comments( + self, response: comment_service.ListCommentsResponse + ) -> comment_service.ListCommentsResponse: + """Post-rpc interceptor for list_comments + + Override in a subclass to manipulate the response + after it is returned by the CommentService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CommentServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CommentServiceRestInterceptor + + +class CommentServiceRestTransport(CommentServiceTransport): + """REST backend transport for CommentService. + + A service to manage comments on cases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudsupport.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CommentServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CommentServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateComment(CommentServiceRestStub): + def __hash__(self): + return hash("CreateComment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: comment_service.CreateCommentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_comment.Comment: + r"""Call the create comment method over HTTP. + + Args: + request (~.comment_service.CreateCommentRequest): + The request object. The request message for CreateComment + endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcs_comment.Comment: + A comment associated with a support + case. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/cases/*}/comments", + "body": "comment", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/cases/*}/comments", + "body": "comment", + }, + ] + request, metadata = self._interceptor.pre_create_comment(request, metadata) + pb_request = comment_service.CreateCommentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcs_comment.Comment() + pb_resp = gcs_comment.Comment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_comment(resp) + return resp + + class _ListComments(CommentServiceRestStub): + def __hash__(self): + return hash("ListComments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: comment_service.ListCommentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> comment_service.ListCommentsResponse: + r"""Call the list comments method over HTTP. + + Args: + request (~.comment_service.ListCommentsRequest): + The request object. The request message for the + ListComments endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.comment_service.ListCommentsResponse: + The response message for the + ListComments endpoint. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*/cases/*}/comments", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/cases/*}/comments", + }, + ] + request, metadata = self._interceptor.pre_list_comments(request, metadata) + pb_request = comment_service.ListCommentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = comment_service.ListCommentsResponse() + pb_resp = comment_service.ListCommentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_comments(resp) + return resp + + @property + def create_comment( + self, + ) -> Callable[[comment_service.CreateCommentRequest], gcs_comment.Comment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateComment(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_comments( + self, + ) -> Callable[ + [comment_service.ListCommentsRequest], comment_service.ListCommentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListComments(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CommentServiceRestTransport",) diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/__init__.py b/packages/google-cloud-support/google/cloud/support_v2/types/__init__.py new file mode 100644 index 000000000000..03c242052bf6 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/types/__init__.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .actor import Actor +from .attachment import Attachment +from .attachment_service import ListAttachmentsRequest, ListAttachmentsResponse +from .case import Case, CaseClassification +from .case_service import ( + CloseCaseRequest, + CreateCaseRequest, + EscalateCaseRequest, + GetCaseRequest, + ListCasesRequest, + ListCasesResponse, + SearchCaseClassificationsRequest, + SearchCaseClassificationsResponse, + SearchCasesRequest, + SearchCasesResponse, + UpdateCaseRequest, +) +from .comment import Comment +from .comment_service import ( + CreateCommentRequest, + ListCommentsRequest, + ListCommentsResponse, +) +from .escalation import Escalation + +__all__ = ( + "Actor", + "Attachment", + "ListAttachmentsRequest", + "ListAttachmentsResponse", + "Case", + "CaseClassification", + "CloseCaseRequest", + "CreateCaseRequest", + "EscalateCaseRequest", + "GetCaseRequest", + "ListCasesRequest", + "ListCasesResponse", + "SearchCaseClassificationsRequest", + "SearchCaseClassificationsResponse", + "SearchCasesRequest", + "SearchCasesResponse", + "UpdateCaseRequest", + "Comment", + "CreateCommentRequest", + "ListCommentsRequest", + "ListCommentsResponse", + "Escalation", +) diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/actor.py b/packages/google-cloud-support/google/cloud/support_v2/types/actor.py new file mode 100644 index 000000000000..05b266e9f0ad --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/types/actor.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.support.v2", + manifest={ + "Actor", + }, +) + + +class Actor(proto.Message): + r"""An object containing information about the effective user and + authenticated principal responsible for an action. + + Attributes: + display_name (str): + The name to display for the actor. If not + provided, it is inferred from credentials + supplied during case creation. When an email is + provided, a display name must also be provided. + This will be obfuscated if the user is a Google + Support agent. + email (str): + The email address of the actor. If not + provided, it is inferred from credentials + supplied during case creation. If the + authenticated principal does not have an email + address, one must be provided. When a name is + provided, an email must also be provided. This + will be obfuscated if the user is a Google + Support agent. + google_support (bool): + Output only. Whether the actor is a Google + support actor. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + email: str = proto.Field( + proto.STRING, + number=2, + ) + google_support: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/attachment.py b/packages/google-cloud-support/google/cloud/support_v2/types/attachment.py new file mode 100644 index 000000000000..b7b2778087cb --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/types/attachment.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.support_v2.types import actor + +__protobuf__ = proto.module( + package="google.cloud.support.v2", + manifest={ + "Attachment", + }, +) + + +class Attachment(proto.Message): + r"""Represents a file attached to a support case. + + Attributes: + name (str): + Output only. The resource name of the + attachment. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the attachment + was created. + creator (google.cloud.support_v2.types.Actor): + Output only. The user who uploaded the + attachment. Note, the name and email will be + obfuscated if the attachment was uploaded by + Google support. 
+ filename (str): + The filename of the attachment (e.g. ``"graph.jpg"``). + mime_type (str): + Output only. The MIME type of the attachment + (e.g. text/plain). + size_bytes (int): + Output only. The size of the attachment in + bytes. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + creator: actor.Actor = proto.Field( + proto.MESSAGE, + number=3, + message=actor.Actor, + ) + filename: str = proto.Field( + proto.STRING, + number=4, + ) + mime_type: str = proto.Field( + proto.STRING, + number=5, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/attachment_service.py b/packages/google-cloud-support/google/cloud/support_v2/types/attachment_service.py new file mode 100644 index 000000000000..45f1e92bc19c --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/types/attachment_service.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.support_v2.types import attachment + +__protobuf__ = proto.module( + package="google.cloud.support.v2", + manifest={ + "ListAttachmentsRequest", + "ListAttachmentsResponse", + }, +) + + +class ListAttachmentsRequest(proto.Message): + r"""The request message for the ListAttachments endpoint. + + Attributes: + parent (str): + Required. The resource name of Case object + for which attachments should be listed. + page_size (int): + The maximum number of attachments fetched + with each request. If not provided, the default + is 10. The maximum page size that will be + returned is 100. + page_token (str): + A token identifying the page of results to + return. If unspecified, the first page is + retrieved. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAttachmentsResponse(proto.Message): + r"""The response message for the ListAttachments endpoint. + + Attributes: + attachments (MutableSequence[google.cloud.support_v2.types.Attachment]): + The list of attachments associated with the + given case. + next_page_token (str): + A token to retrieve the next page of results. This should be + set in the ``page_token`` field of subsequent + ``cases.attachments.list`` requests. If unspecified, there + are no more results to retrieve. 
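The ``next_page_token`` contract described above is what the generated pagers consume on the caller's behalf. A sketch, assuming the companion attachment-service client generated elsewhere in this package (the ``CaseAttachmentServiceClient`` name and resource values are assumptions, not part of this hunk):

.. code-block:: python

    from google.cloud import support_v2

    client = support_v2.CaseAttachmentServiceClient()  # name assumed

    pager = client.list_attachments(
        request=support_v2.ListAttachmentsRequest(
            parent="projects/my-project/cases/123",  # hypothetical case
            page_size=10,
        )
    )

    # The pager feeds next_page_token back into follow-up requests; iterate
    # .pages to observe the 10-item chunks requested above.
    for page in pager.pages:
        for attachment in page.attachments:
            print(attachment.filename, attachment.size_bytes)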
+ """ + + @property + def raw_page(self): + return self + + attachments: MutableSequence[attachment.Attachment] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=attachment.Attachment, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/case.py b/packages/google-cloud-support/google/cloud/support_v2/types/case.py new file mode 100644 index 000000000000..d9af5cab5b59 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/types/case.py @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.support_v2.types import actor + +__protobuf__ = proto.module( + package="google.cloud.support.v2", + manifest={ + "Case", + "CaseClassification", + }, +) + + +class Case(proto.Message): + r"""A support case. + + Attributes: + name (str): + The resource name for the case. + display_name (str): + The short summary of the issue reported in + this case. + description (str): + A broad description of the issue. + classification (google.cloud.support_v2.types.CaseClassification): + The issue classification applicable to this + case. + time_zone (str): + The timezone of the user who created the + support case. It should be in a format IANA + recognizes: https://www.iana.org/time-zones. + There is no additional validation done by the + API. + subscriber_email_addresses (MutableSequence[str]): + The email addresses to receive updates on + this case. + state (google.cloud.support_v2.types.Case.State): + Output only. The current status of the + support case. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time this case was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time this case was last + updated. + creator (google.cloud.support_v2.types.Actor): + The user who created the case. + Note: The name and email will be obfuscated if + the case was created by Google Support. + contact_email (str): + A user-supplied email address to send case + update notifications for. This should only be + used in BYOID flows, where we cannot infer the + user's email address directly from their EUCs. + escalated (bool): + Whether the case is currently escalated. + test_case (bool): + Whether this case was created for internal + API testing and should not be acted on by the + support team. + language_code (str): + The language the user has requested to receive support in. + This should be a BCP 47 language code (e.g., ``"en"``, + ``"zh-CN"``, ``"zh-TW"``, ``"ja"``, ``"ko"``). If no + language or an unsupported language is specified, this field + defaults to English (en). 
+ + Language selection during case creation may affect your + available support options. For a list of supported languages + and their support working hours, see: + https://cloud.google.com/support/docs/language-working-hours + priority (google.cloud.support_v2.types.Case.Priority): + The priority of this case. + """ + + class State(proto.Enum): + r"""The status of a support case. + + Values: + STATE_UNSPECIFIED (0): + Case is in an unknown state. + NEW (1): + The case has been created but no one is + assigned to work on it yet. + IN_PROGRESS_GOOGLE_SUPPORT (2): + The case is currently being handled by Google + support. + ACTION_REQUIRED (3): + Google is waiting for a response. + SOLUTION_PROVIDED (4): + A solution has been offered for the case, but + it isn't yet closed. + CLOSED (5): + The case has been resolved. + """ + STATE_UNSPECIFIED = 0 + NEW = 1 + IN_PROGRESS_GOOGLE_SUPPORT = 2 + ACTION_REQUIRED = 3 + SOLUTION_PROVIDED = 4 + CLOSED = 5 + + class Priority(proto.Enum): + r"""The case Priority. P0 is most urgent and P4 the least. + + Values: + PRIORITY_UNSPECIFIED (0): + Priority is undefined or has not been set + yet. + P0 (1): + Extreme impact on a production service. + Service is hard down. + P1 (2): + Critical impact on a production service. + Service is currently unusable. + P2 (3): + Severe impact on a production service. + Service is usable but greatly impaired. + P3 (4): + Medium impact on a production service. + Service is available, but moderately impaired. + P4 (5): + General questions or minor issues. + Production service is fully available. + """ + PRIORITY_UNSPECIFIED = 0 + P0 = 1 + P1 = 2 + P2 = 3 + P3 = 4 + P4 = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + classification: "CaseClassification" = proto.Field( + proto.MESSAGE, + number=4, + message="CaseClassification", + ) + time_zone: str = proto.Field( + proto.STRING, + number=8, + ) + subscriber_email_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + state: State = proto.Field( + proto.ENUM, + number=12, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + creator: actor.Actor = proto.Field( + proto.MESSAGE, + number=15, + message=actor.Actor, + ) + contact_email: str = proto.Field( + proto.STRING, + number=35, + ) + escalated: bool = proto.Field( + proto.BOOL, + number=17, + ) + test_case: bool = proto.Field( + proto.BOOL, + number=19, + ) + language_code: str = proto.Field( + proto.STRING, + number=23, + ) + priority: Priority = proto.Field( + proto.ENUM, + number=32, + enum=Priority, + ) + + +class CaseClassification(proto.Message): + r"""A classification object with a product type and value. + + Attributes: + id (str): + The unique ID for a classification. Must be specified for + case creation. + + To retrieve valid classification IDs for case creation, use + ``caseClassifications.search``. + display_name (str): + The display name of the classification. 
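A construction sketch for the ``Case`` message and enums defined above; every field value here is illustrative, and the classification ID is a hypothetical result of ``caseClassifications.search``:

.. code-block:: python

    from google.cloud import support_v2

    case = support_v2.Case(
        display_name="VM fails to boot",
        description="Instance my-vm in us-central1-a hangs during startup.",
        classification=support_v2.CaseClassification(
            id="100H31K",  # hypothetical classification ID
        ),
        time_zone="America/Los_Angeles",  # IANA name, per the docstring above
        subscriber_email_addresses=["oncall@example.com"],
        priority=support_v2.Case.Priority.P2,
    )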
+ """ + + id: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/case_service.py b/packages/google-cloud-support/google/cloud/support_v2/types/case_service.py new file mode 100644 index 000000000000..aa1a57304132 --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/types/case_service.py @@ -0,0 +1,403 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.support_v2.types import case as gcs_case +from google.cloud.support_v2.types import escalation as gcs_escalation + +__protobuf__ = proto.module( + package="google.cloud.support.v2", + manifest={ + "GetCaseRequest", + "CreateCaseRequest", + "ListCasesRequest", + "ListCasesResponse", + "SearchCasesRequest", + "SearchCasesResponse", + "EscalateCaseRequest", + "UpdateCaseRequest", + "CloseCaseRequest", + "SearchCaseClassificationsRequest", + "SearchCaseClassificationsResponse", + }, +) + + +class GetCaseRequest(proto.Message): + r"""The request message for the GetCase endpoint. + + Attributes: + name (str): + Required. The fully qualified name of a case + to be retrieved. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCaseRequest(proto.Message): + r"""The request message for the CreateCase endpoint. + + Attributes: + parent (str): + Required. The name of the Google Cloud + Resource under which the case should be created. + case (google.cloud.support_v2.types.Case): + Required. The case to be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + case: gcs_case.Case = proto.Field( + proto.MESSAGE, + number=2, + message=gcs_case.Case, + ) + + +class ListCasesRequest(proto.Message): + r"""The request message for the ListCases endpoint. + + Attributes: + parent (str): + Required. The fully qualified name of parent + resource to list cases under. + filter (str): + An expression written in filter language. If non-empty, the + query returns the cases that match the filter. Else, the + query doesn't filter the cases. + + Filter expressions use the following fields with the + operators equals (``=``) and ``AND``: + + - ``state``: The accepted values are ``OPEN`` or + ``CLOSED``. + - ``priority``: The accepted values are ``P0``, ``P1``, + ``P2``, ``P3``, or ``P4``. You can specify multiple + values for priority using the ``OR`` operator. For + example, ``priority=P1 OR priority=P2``. + - ``creator.email``: The email address of the case creator. 
+ + Examples: + + - ``state=CLOSED`` + - ``state=OPEN AND creator.email="tester@example.com"`` + - ``state=OPEN AND (priority=P0 OR priority=P1)`` + page_size (int): + The maximum number of cases fetched with each + request. Defaults to 10. + page_token (str): + A token identifying the page of results to + return. If unspecified, the first page is + retrieved. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListCasesResponse(proto.Message): + r"""The response message for the ListCases endpoint. + + Attributes: + cases (MutableSequence[google.cloud.support_v2.types.Case]): + The list of cases associated with the Google + Cloud Resource, after any filters have been + applied. + next_page_token (str): + A token to retrieve the next page of results. This should be + set in the ``page_token`` field of the subsequent + ``ListCasesRequest`` message that is issued. If unspecified, + there are no more results to retrieve. + """ + + @property + def raw_page(self): + return self + + cases: MutableSequence[gcs_case.Case] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcs_case.Case, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchCasesRequest(proto.Message): + r"""The request message for the SearchCases endpoint. + + Attributes: + parent (str): + The fully qualified name of parent resource + to search cases under. + query (str): + An expression written in filter language. + + A query uses the following fields with the operators equals + (``=``) and ``AND``: + + - ``organization``: An organization name in the form + ``organizations/<organization_id>``. + - ``project``: A project name in the form + ``projects/<project_id>``. + - ``state``: The accepted values are ``OPEN`` or + ``CLOSED``. + - ``priority``: The accepted values are ``P0``, ``P1``, + ``P2``, ``P3``, or ``P4``. You can specify multiple + values for priority using the ``OR`` operator. For + example, ``priority=P1 OR priority=P2``. + - ``creator.email``: The email address of the case creator. + - ``billingAccount``: A billing account in the form + ``billingAccounts/<billing_account_id>`` + + You must specify either ``organization`` or ``project``. + + To search across ``displayName``, ``description``, and + comments, use a global restriction with no keyword or + operator. For example, ``"my search"``. + + To search only cases updated after a certain date, use + ``update_time`` restricted with that particular date, time, + and timezone in ISO datetime format. For example, + ``update_time>"2020-01-01T00:00:00-05:00"``. ``update_time`` + only supports the greater than operator (``>``). + + Examples: + + - ``organization="organizations/123456789"`` + - ``project="projects/my-project-id"`` + - ``project="projects/123456789"`` + - ``billing_account="billingAccounts/123456-A0B0C0-CUZ789"`` + - ``organization="organizations/123456789" AND state=CLOSED`` + - ``project="projects/my-project-id" AND creator.email="tester@example.com"`` + - ``project="projects/my-project-id" AND (priority=P0 OR priority=P1)`` + page_size (int): + The maximum number of cases fetched with each + request. The default page size is 10. + page_token (str): + A token identifying the page of results to + return. If unspecified, the first page is + retrieved.
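A search sketch using the query grammar documented above; the project ID is a placeholder, and ``CaseServiceClient`` is the case-service client added elsewhere in this package:

.. code-block:: python

    from google.cloud import support_v2

    client = support_v2.CaseServiceClient()

    pager = client.search_cases(
        request=support_v2.SearchCasesRequest(
            parent="projects/my-project",  # hypothetical
            query="state=OPEN AND (priority=P0 OR priority=P1)",
        )
    )
    for case in pager:
        print(case.name, case.priority)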
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    query: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class SearchCasesResponse(proto.Message):
+    r"""The response message for the SearchCases endpoint.
+
+    Attributes:
+        cases (MutableSequence[google.cloud.support_v2.types.Case]):
+            The list of cases associated with the Google
+            Cloud Resource, after any filters have been
+            applied.
+        next_page_token (str):
+            A token to retrieve the next page of results. This should be
+            set in the ``page_token`` field of the subsequent
+            ``SearchCasesRequest`` message that is issued. If
+            unspecified, there are no more results to retrieve.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    cases: MutableSequence[gcs_case.Case] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gcs_case.Case,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class EscalateCaseRequest(proto.Message):
+    r"""The request message for the EscalateCase endpoint.
+
+    Attributes:
+        name (str):
+            Required. The fully qualified name of the
+            Case resource to be escalated.
+        escalation (google.cloud.support_v2.types.Escalation):
+            The escalation object to be sent with the
+            escalation request.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    escalation: gcs_escalation.Escalation = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gcs_escalation.Escalation,
+    )
+
+
+class UpdateCaseRequest(proto.Message):
+    r"""The request message for the UpdateCase endpoint.
+
+    Attributes:
+        case (google.cloud.support_v2.types.Case):
+            Required. The case object to update.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            A list of attributes of the case object that should be
+            updated as part of this request. Supported values are
+            ``priority``, ``display_name``, and
+            ``subscriber_email_addresses``. If no fields are specified,
+            all supported fields are updated.
+
+            WARNING: If you do not provide a field mask, then you might
+            accidentally clear some fields. For example, if you leave
+            the field mask empty and do not provide a value for
+            ``subscriber_email_addresses``, then
+            ``subscriber_email_addresses`` is updated to empty.
+    """
+
+    case: gcs_case.Case = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gcs_case.Case,
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class CloseCaseRequest(proto.Message):
+    r"""The request message for the CloseCase endpoint.
+
+    Attributes:
+        name (str):
+            Required. The fully qualified name of the
+            case resource to be closed.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class SearchCaseClassificationsRequest(proto.Message):
+    r"""The request message for the SearchCaseClassifications endpoint.
+
+    Attributes:
+        query (str):
+            An expression written in the Google Cloud
+            filter language. If non-empty, then only
+            classifications whose fields match the filter
+            are returned. If empty, then no classifications
+            are filtered out.
+        page_size (int):
+            The maximum number of classifications fetched
+            with each request.
+        page_token (str):
+            A token identifying the page of results to
+            return. If unspecified, the first page is
+            retrieved.
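+
+    For illustration only, a request that pages through the
+    classifications ten at a time (an empty ``query`` filters nothing
+    out, per the description above) might look like::
+
+        request = SearchCaseClassificationsRequest(
+            page_size=10,
+        )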
+    """
+
+    query: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class SearchCaseClassificationsResponse(proto.Message):
+    r"""The response message for the SearchCaseClassifications endpoint.
+
+    Attributes:
+        case_classifications (MutableSequence[google.cloud.support_v2.types.CaseClassification]):
+            The classifications retrieved.
+        next_page_token (str):
+            A token to retrieve the next page of results. This should be
+            set in the ``page_token`` field of the subsequent
+            ``SearchCaseClassificationsRequest`` message that is issued.
+            If unspecified, there are no more results to retrieve.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    case_classifications: MutableSequence[
+        gcs_case.CaseClassification
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gcs_case.CaseClassification,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/comment.py b/packages/google-cloud-support/google/cloud/support_v2/types/comment.py
new file mode 100644
index 000000000000..debe78a7fdd0
--- /dev/null
+++ b/packages/google-cloud-support/google/cloud/support_v2/types/comment.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.support_v2.types import actor
+
+__protobuf__ = proto.module(
+    package="google.cloud.support.v2",
+    manifest={
+        "Comment",
+    },
+)
+
+
+class Comment(proto.Message):
+    r"""A comment associated with a support case.
+
+    Attributes:
+        name (str):
+            Output only. The resource name for the
+            comment.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when this comment was
+            created.
+        creator (google.cloud.support_v2.types.Actor):
+            Output only. The user or Google Support agent
+            who created this comment.
+        body (str):
+            The full comment body. Maximum of 12800
+            characters. This can contain rich text syntax.
+        plain_text_body (str):
+            Output only. DEPRECATED. An automatically
+            generated plain text version of body with all
+            rich text syntax stripped.
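+
+    For illustration only (the body text is a hypothetical value), a
+    new comment sets only ``body``; every other field is output only::
+
+        comment = Comment(body="Here are the logs you requested.")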
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    creator: actor.Actor = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=actor.Actor,
+    )
+    body: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    plain_text_body: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/comment_service.py b/packages/google-cloud-support/google/cloud/support_v2/types/comment_service.py
new file mode 100644
index 000000000000..6a313478fec5
--- /dev/null
+++ b/packages/google-cloud-support/google/cloud/support_v2/types/comment_service.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.support_v2.types import comment as gcs_comment
+
+__protobuf__ = proto.module(
+    package="google.cloud.support.v2",
+    manifest={
+        "ListCommentsRequest",
+        "ListCommentsResponse",
+        "CreateCommentRequest",
+    },
+)
+
+
+class ListCommentsRequest(proto.Message):
+    r"""The request message for the ListComments endpoint.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the Case object
+            for which comments should be listed.
+        page_size (int):
+            The maximum number of comments fetched with
+            each request. Defaults to 10.
+        page_token (str):
+            A token identifying the page of results to
+            return. If unspecified, the first page is
+            retrieved.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=4,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListCommentsResponse(proto.Message):
+    r"""The response message for the ListComments endpoint.
+
+    Attributes:
+        comments (MutableSequence[google.cloud.support_v2.types.Comment]):
+            The list of Comments associated with the
+            given Case.
+        next_page_token (str):
+            A token to retrieve the next page of results. This should be
+            set in the ``page_token`` field of the subsequent
+            ``ListCommentsRequest`` message that is issued. If
+            unspecified, there are no more results to retrieve.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    comments: MutableSequence[gcs_comment.Comment] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gcs_comment.Comment,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class CreateCommentRequest(proto.Message):
+    r"""The request message for the CreateComment endpoint.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the Case to
+            which this comment should be added.
+        comment (google.cloud.support_v2.types.Comment):
+            Required. The Comment object to be added to
+            this Case.
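+
+    For illustration only (the parent below is a hypothetical resource
+    name), a request might look like::
+
+        request = CreateCommentRequest(
+            parent="projects/my-project/cases/43595344",
+            comment={"body": "This is a test comment."},
+        )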
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + comment: gcs_comment.Comment = proto.Field( + proto.MESSAGE, + number=2, + message=gcs_comment.Comment, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-support/google/cloud/support_v2/types/escalation.py b/packages/google-cloud-support/google/cloud/support_v2/types/escalation.py new file mode 100644 index 000000000000..53a2b6cb106d --- /dev/null +++ b/packages/google-cloud-support/google/cloud/support_v2/types/escalation.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.support.v2", + manifest={ + "Escalation", + }, +) + + +class Escalation(proto.Message): + r"""An escalation of a support case. + + Attributes: + reason (google.cloud.support_v2.types.Escalation.Reason): + Required. The reason why the Case is being + escalated. + justification (str): + Required. A free text description to accompany the + ``reason`` field above. Provides additional context on why + the case is being escalated. + """ + + class Reason(proto.Enum): + r"""An enum detailing the possible reasons a case may be + escalated. + + Values: + REASON_UNSPECIFIED (0): + The escalation reason is in an unknown state + or has not been specified. + RESOLUTION_TIME (1): + The case is taking too long to resolve. + TECHNICAL_EXPERTISE (2): + The support agent does not have the expertise + required to successfully resolve the issue. + BUSINESS_IMPACT (3): + The issue is having a significant business + impact. + """ + REASON_UNSPECIFIED = 0 + RESOLUTION_TIME = 1 + TECHNICAL_EXPERTISE = 2 + BUSINESS_IMPACT = 3 + + reason: Reason = proto.Field( + proto.ENUM, + number=4, + enum=Reason, + ) + justification: str = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-support/mypy.ini b/packages/google-cloud-support/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-support/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-support/noxfile.py b/packages/google-cloud-support/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-support/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+        "google-auth",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/packages/google-cloud-support/renovate.json b/packages/google-cloud-support/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-support/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_attachment_service_list_attachments_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_attachment_service_list_attachments_async.py
new file mode 100644
index 000000000000..6e1b4ac596c6
--- /dev/null
+++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_attachment_service_list_attachments_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListAttachments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-support
+
+
+# [START cloudsupport_v2_generated_CaseAttachmentService_ListAttachments_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import support_v2
+
+
+async def sample_list_attachments():
+    # Create a client
+    client = support_v2.CaseAttachmentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = support_v2.ListAttachmentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_attachments(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END cloudsupport_v2_generated_CaseAttachmentService_ListAttachments_async]
diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_attachment_service_list_attachments_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_attachment_service_list_attachments_sync.py
new file mode 100644
index 000000000000..33f616c1903b
--- /dev/null
+++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_attachment_service_list_attachments_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListAttachments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-support
+
+
+# [START cloudsupport_v2_generated_CaseAttachmentService_ListAttachments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_list_attachments(): + # Create a client + client = support_v2.CaseAttachmentServiceClient() + + # Initialize request argument(s) + request = support_v2.ListAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_attachments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudsupport_v2_generated_CaseAttachmentService_ListAttachments_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_close_case_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_close_case_async.py new file mode 100644 index 000000000000..2b25ffc4aa97 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_close_case_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CloseCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_CloseCase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_close_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.CloseCaseRequest( + name="name_value", + ) + + # Make the request + response = await client.close_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_CloseCase_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_close_case_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_close_case_sync.py new file mode 100644 index 000000000000..a9fa4db25147 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_close_case_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CloseCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_CloseCase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_close_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.CloseCaseRequest( + name="name_value", + ) + + # Make the request + response = client.close_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_CloseCase_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_create_case_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_create_case_async.py new file mode 100644 index 000000000000..563912f95cf7 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_create_case_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_CreateCase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_create_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.CreateCaseRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_CreateCase_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_create_case_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_create_case_sync.py new file mode 100644 index 000000000000..3fe31c3a530a --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_create_case_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_CreateCase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_create_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.CreateCaseRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_CreateCase_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_escalate_case_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_escalate_case_async.py new file mode 100644 index 000000000000..3d4469bc7cda --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_escalate_case_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EscalateCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_EscalateCase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_escalate_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.EscalateCaseRequest( + name="name_value", + ) + + # Make the request + response = await client.escalate_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_EscalateCase_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_escalate_case_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_escalate_case_sync.py new file mode 100644 index 000000000000..9adc7810fa94 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_escalate_case_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EscalateCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_EscalateCase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_escalate_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.EscalateCaseRequest( + name="name_value", + ) + + # Make the request + response = client.escalate_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_EscalateCase_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_get_case_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_get_case_async.py new file mode 100644 index 000000000000..350935997442 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_get_case_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_GetCase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_get_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.GetCaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_GetCase_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_get_case_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_get_case_sync.py new file mode 100644 index 000000000000..426982b637e5 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_get_case_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_GetCase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_get_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.GetCaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_GetCase_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_list_cases_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_list_cases_async.py new file mode 100644 index 000000000000..a80b9dbddfe8 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_list_cases_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListCases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-support
+
+
+# [START cloudsupport_v2_generated_CaseService_ListCases_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import support_v2
+
+
+async def sample_list_cases():
+    # Create a client
+    client = support_v2.CaseServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = support_v2.ListCasesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_cases(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END cloudsupport_v2_generated_CaseService_ListCases_async]
diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_list_cases_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_list_cases_sync.py
new file mode 100644
index 000000000000..2114dd42a0d2
--- /dev/null
+++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_list_cases_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListCases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-support
+
+
+# [START cloudsupport_v2_generated_CaseService_ListCases_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_list_cases(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.ListCasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudsupport_v2_generated_CaseService_ListCases_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_case_classifications_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_case_classifications_async.py new file mode 100644 index 000000000000..780151dfe3d0 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_case_classifications_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchCaseClassifications +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_SearchCaseClassifications_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import support_v2
+
+
+async def sample_search_case_classifications():
+    # Create a client
+    client = support_v2.CaseServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = support_v2.SearchCaseClassificationsRequest(
+    )
+
+    # Make the request
+    page_result = await client.search_case_classifications(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END cloudsupport_v2_generated_CaseService_SearchCaseClassifications_async]
diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_case_classifications_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_case_classifications_sync.py
new file mode 100644
index 000000000000..be4405341225
--- /dev/null
+++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_case_classifications_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchCaseClassifications
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-support
+
+
+# [START cloudsupport_v2_generated_CaseService_SearchCaseClassifications_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_search_case_classifications(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.SearchCaseClassificationsRequest( + ) + + # Make the request + page_result = client.search_case_classifications(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudsupport_v2_generated_CaseService_SearchCaseClassifications_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_cases_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_cases_async.py new file mode 100644 index 000000000000..a4182ecf2f9c --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_cases_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchCases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_SearchCases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_search_cases(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.SearchCasesRequest( + ) + + # Make the request + page_result = await client.search_cases(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudsupport_v2_generated_CaseService_SearchCases_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_cases_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_cases_sync.py new file mode 100644 index 000000000000..2c32b68a6a10 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_search_cases_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchCases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_SearchCases_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
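+#   (For illustration only -- hypothetical, non-generated values: +#     request = support_v2.SearchCasesRequest( +#         parent="projects/some-project", +#         query="state=OPEN", +#     ) +#   )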
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_search_cases(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.SearchCasesRequest( + ) + + # Make the request + page_result = client.search_cases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudsupport_v2_generated_CaseService_SearchCases_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_update_case_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_update_case_async.py new file mode 100644 index 000000000000..8a3753735f03 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_update_case_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_UpdateCase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_update_case(): + # Create a client + client = support_v2.CaseServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.UpdateCaseRequest( + ) + + # Make the request + response = await client.update_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_UpdateCase_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_update_case_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_update_case_sync.py new file mode 100644 index 000000000000..f879979e8f4b --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_case_service_update_case_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CaseService_UpdateCase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_update_case(): + # Create a client + client = support_v2.CaseServiceClient() + + # Initialize request argument(s) + request = support_v2.UpdateCaseRequest( + ) + + # Make the request + response = client.update_case(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CaseService_UpdateCase_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_create_comment_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_create_comment_async.py new file mode 100644 index 000000000000..7e86439e5964 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_create_comment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CommentService_CreateComment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
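+#   (For illustration only -- hypothetical, non-generated values for the +#   required fields: +#     request = support_v2.CreateCommentRequest( +#         parent="projects/some-project/cases/12345678", +#         comment=support_v2.Comment(body="Example comment text."), +#     ) +#   )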
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_create_comment(): + # Create a client + client = support_v2.CommentServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.CreateCommentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_comment(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CommentService_CreateComment_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_create_comment_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_create_comment_sync.py new file mode 100644 index 000000000000..fde442b8520b --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_create_comment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateComment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CommentService_CreateComment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_create_comment(): + # Create a client + client = support_v2.CommentServiceClient() + + # Initialize request argument(s) + request = support_v2.CreateCommentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_comment(request=request) + + # Handle the response + print(response) + +# [END cloudsupport_v2_generated_CommentService_CreateComment_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_list_comments_async.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_list_comments_async.py new file mode 100644 index 000000000000..7a3495965c86 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_list_comments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListComments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CommentService_ListComments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
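+#   (For illustration only -- hypothetical, non-generated values; page_size +#   bounds the number of comments returned per page: +#     request = support_v2.ListCommentsRequest( +#         parent="projects/some-project/cases/12345678", +#         page_size=10, +#     ) +#   )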
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +async def sample_list_comments(): + # Create a client + client = support_v2.CommentServiceAsyncClient() + + # Initialize request argument(s) + request = support_v2.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_comments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudsupport_v2_generated_CommentService_ListComments_async] diff --git a/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_list_comments_sync.py b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_list_comments_sync.py new file mode 100644 index 000000000000..d972ac1a75fb --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/cloudsupport_v2_generated_comment_service_list_comments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListComments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-support + + +# [START cloudsupport_v2_generated_CommentService_ListComments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import support_v2 + + +def sample_list_comments(): + # Create a client + client = support_v2.CommentServiceClient() + + # Initialize request argument(s) + request = support_v2.ListCommentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_comments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudsupport_v2_generated_CommentService_ListComments_sync] diff --git a/packages/google-cloud-support/samples/generated_samples/snippet_metadata_google.cloud.support.v2.json b/packages/google-cloud-support/samples/generated_samples/snippet_metadata_google.cloud.support.v2.json new file mode 100644 index 000000000000..26f909a94299 --- /dev/null +++ b/packages/google-cloud-support/samples/generated_samples/snippet_metadata_google.cloud.support.v2.json @@ -0,0 +1,1778 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.support.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-support", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseAttachmentServiceAsyncClient", + "shortName": "CaseAttachmentServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseAttachmentServiceAsyncClient.list_attachments", + "method": { + "fullName": "google.cloud.support.v2.CaseAttachmentService.ListAttachments", + "service": { + "fullName": "google.cloud.support.v2.CaseAttachmentService", + "shortName": "CaseAttachmentService" + }, + "shortName": "ListAttachments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.ListAttachmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_attachment_service.pagers.ListAttachmentsAsyncPager", + "shortName": "list_attachments" + }, + "description": "Sample for ListAttachments", + "file": "cloudsupport_v2_generated_case_attachment_service_list_attachments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseAttachmentService_ListAttachments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_attachment_service_list_attachments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseAttachmentServiceClient", + "shortName": "CaseAttachmentServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseAttachmentServiceClient.list_attachments", + "method": { + "fullName": "google.cloud.support.v2.CaseAttachmentService.ListAttachments", + "service": { + "fullName": "google.cloud.support.v2.CaseAttachmentService", + "shortName": "CaseAttachmentService" + 
}, + "shortName": "ListAttachments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.ListAttachmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_attachment_service.pagers.ListAttachmentsPager", + "shortName": "list_attachments" + }, + "description": "Sample for ListAttachments", + "file": "cloudsupport_v2_generated_case_attachment_service_list_attachments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseAttachmentService_ListAttachments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_attachment_service_list_attachments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.close_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.CloseCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "CloseCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.CloseCaseRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "close_case" + }, + "description": "Sample for CloseCase", + "file": "cloudsupport_v2_generated_case_service_close_case_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_CloseCase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_close_case_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.close_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.CloseCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "CloseCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.CloseCaseRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "close_case" + }, + "description": "Sample for CloseCase", + "file": "cloudsupport_v2_generated_case_service_close_case_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_CloseCase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_close_case_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.create_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.CreateCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "CreateCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.CreateCaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "case", + "type": "google.cloud.support_v2.types.Case" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "create_case" + }, + "description": "Sample for CreateCase", + "file": "cloudsupport_v2_generated_case_service_create_case_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_CreateCase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_create_case_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.create_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.CreateCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "CreateCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.CreateCaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "case", + "type": "google.cloud.support_v2.types.Case" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "create_case" + }, + "description": "Sample for 
CreateCase", + "file": "cloudsupport_v2_generated_case_service_create_case_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_CreateCase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_create_case_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.escalate_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.EscalateCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "EscalateCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.EscalateCaseRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "escalate_case" + }, + "description": "Sample for EscalateCase", + "file": "cloudsupport_v2_generated_case_service_escalate_case_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_EscalateCase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_escalate_case_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.escalate_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.EscalateCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "EscalateCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.EscalateCaseRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "escalate_case" + }, + "description": "Sample for EscalateCase", + "file": "cloudsupport_v2_generated_case_service_escalate_case_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_EscalateCase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_escalate_case_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.get_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.GetCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "GetCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.GetCaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "get_case" + }, + "description": "Sample for GetCase", + "file": "cloudsupport_v2_generated_case_service_get_case_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_GetCase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_get_case_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.get_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.GetCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "GetCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.GetCaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "get_case" + }, + "description": "Sample for GetCase", + "file": "cloudsupport_v2_generated_case_service_get_case_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_GetCase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_get_case_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.list_cases", + "method": { + "fullName": "google.cloud.support.v2.CaseService.ListCases", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "ListCases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.ListCasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_service.pagers.ListCasesAsyncPager", + "shortName": "list_cases" + }, + "description": "Sample for ListCases", + "file": "cloudsupport_v2_generated_case_service_list_cases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_ListCases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_list_cases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.list_cases", + "method": { + "fullName": "google.cloud.support.v2.CaseService.ListCases", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "ListCases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.ListCasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_service.pagers.ListCasesPager", + "shortName": "list_cases" + }, + "description": "Sample for ListCases", + "file": "cloudsupport_v2_generated_case_service_list_cases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_ListCases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_list_cases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.search_case_classifications", + "method": { + "fullName": 
"google.cloud.support.v2.CaseService.SearchCaseClassifications", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "SearchCaseClassifications" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.SearchCaseClassificationsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_service.pagers.SearchCaseClassificationsAsyncPager", + "shortName": "search_case_classifications" + }, + "description": "Sample for SearchCaseClassifications", + "file": "cloudsupport_v2_generated_case_service_search_case_classifications_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_SearchCaseClassifications_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_search_case_classifications_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.search_case_classifications", + "method": { + "fullName": "google.cloud.support.v2.CaseService.SearchCaseClassifications", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "SearchCaseClassifications" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.SearchCaseClassificationsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_service.pagers.SearchCaseClassificationsPager", + "shortName": "search_case_classifications" + }, + "description": "Sample for SearchCaseClassifications", + "file": "cloudsupport_v2_generated_case_service_search_case_classifications_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_SearchCaseClassifications_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_search_case_classifications_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.search_cases", + "method": { + "fullName": "google.cloud.support.v2.CaseService.SearchCases", + 
"service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "SearchCases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.SearchCasesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_service.pagers.SearchCasesAsyncPager", + "shortName": "search_cases" + }, + "description": "Sample for SearchCases", + "file": "cloudsupport_v2_generated_case_service_search_cases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_SearchCases_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_search_cases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.search_cases", + "method": { + "fullName": "google.cloud.support.v2.CaseService.SearchCases", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "SearchCases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.SearchCasesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.case_service.pagers.SearchCasesPager", + "shortName": "search_cases" + }, + "description": "Sample for SearchCases", + "file": "cloudsupport_v2_generated_case_service_search_cases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_SearchCases_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_search_cases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient", + "shortName": "CaseServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceAsyncClient.update_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.UpdateCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "UpdateCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.UpdateCaseRequest" + }, + { + "name": "case", + "type": "google.cloud.support_v2.types.Case" + }, + { + "name": 
"update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "update_case" + }, + "description": "Sample for UpdateCase", + "file": "cloudsupport_v2_generated_case_service_update_case_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_UpdateCase_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_update_case_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CaseServiceClient", + "shortName": "CaseServiceClient" + }, + "fullName": "google.cloud.support_v2.CaseServiceClient.update_case", + "method": { + "fullName": "google.cloud.support.v2.CaseService.UpdateCase", + "service": { + "fullName": "google.cloud.support.v2.CaseService", + "shortName": "CaseService" + }, + "shortName": "UpdateCase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.UpdateCaseRequest" + }, + { + "name": "case", + "type": "google.cloud.support_v2.types.Case" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Case", + "shortName": "update_case" + }, + "description": "Sample for UpdateCase", + "file": "cloudsupport_v2_generated_case_service_update_case_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CaseService_UpdateCase_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_case_service_update_case_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CommentServiceAsyncClient", + "shortName": "CommentServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CommentServiceAsyncClient.create_comment", + "method": { + "fullName": "google.cloud.support.v2.CommentService.CreateComment", + "service": { + "fullName": "google.cloud.support.v2.CommentService", + "shortName": "CommentService" + }, + "shortName": "CreateComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.CreateCommentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "comment", + "type": "google.cloud.support_v2.types.Comment" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Comment", + "shortName": "create_comment" + }, + "description": "Sample for CreateComment", + "file": "cloudsupport_v2_generated_comment_service_create_comment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CommentService_CreateComment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_comment_service_create_comment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CommentServiceClient", + "shortName": "CommentServiceClient" + }, + "fullName": "google.cloud.support_v2.CommentServiceClient.create_comment", + "method": { + "fullName": "google.cloud.support.v2.CommentService.CreateComment", + "service": { + "fullName": "google.cloud.support.v2.CommentService", + "shortName": "CommentService" + }, + "shortName": "CreateComment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.CreateCommentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "comment", + "type": "google.cloud.support_v2.types.Comment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.types.Comment", + "shortName": "create_comment" + }, + "description": "Sample for CreateComment", + "file": "cloudsupport_v2_generated_comment_service_create_comment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CommentService_CreateComment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_comment_service_create_comment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.support_v2.CommentServiceAsyncClient", + "shortName": "CommentServiceAsyncClient" + }, + "fullName": "google.cloud.support_v2.CommentServiceAsyncClient.list_comments", + "method": { + "fullName": "google.cloud.support.v2.CommentService.ListComments", + "service": { + "fullName": "google.cloud.support.v2.CommentService", + "shortName": "CommentService" + }, + "shortName": "ListComments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.ListCommentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.cloud.support_v2.services.comment_service.pagers.ListCommentsAsyncPager", + "shortName": "list_comments" + }, + "description": "Sample for ListComments", + "file": "cloudsupport_v2_generated_comment_service_list_comments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CommentService_ListComments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_comment_service_list_comments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.support_v2.CommentServiceClient", + "shortName": "CommentServiceClient" + }, + "fullName": "google.cloud.support_v2.CommentServiceClient.list_comments", + "method": { + "fullName": "google.cloud.support.v2.CommentService.ListComments", + "service": { + "fullName": "google.cloud.support.v2.CommentService", + "shortName": "CommentService" + }, + "shortName": "ListComments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.support_v2.types.ListCommentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.support_v2.services.comment_service.pagers.ListCommentsPager", + "shortName": "list_comments" + }, + "description": "Sample for ListComments", + "file": "cloudsupport_v2_generated_comment_service_list_comments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsupport_v2_generated_CommentService_ListComments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsupport_v2_generated_comment_service_list_comments_sync.py" + } + ] +} diff --git a/packages/google-cloud-support/scripts/decrypt-secrets.sh b/packages/google-cloud-support/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-support/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-support/scripts/fixup_support_v2_keywords.py b/packages/google-cloud-support/scripts/fixup_support_v2_keywords.py new file mode 100644 index 000000000000..f2fded6c0c84 --- /dev/null +++ b/packages/google-cloud-support/scripts/fixup_support_v2_keywords.py @@ -0,0 +1,186 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class supportCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'close_case': ('name', ), + 'create_case': ('parent', 'case', ), + 'create_comment': ('parent', 'comment', ), + 'escalate_case': ('name', 'escalation', ), + 'get_case': ('name', ), + 'list_attachments': ('parent', 'page_size', 'page_token', ), + 'list_cases': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_comments': ('parent', 'page_size', 'page_token', ), + 'search_case_classifications': ('query', 'page_size', 'page_token', ), + 'search_cases': ('parent', 'query', 'page_size', 'page_token', ), + 'update_case': ('case', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. 
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=supportCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the support client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates on a best-effort basis when converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
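+
+Example of the rewrite this tool performs (editor's sketch; the resource name
+is hypothetical, and 'get_case' takes 'name' per METHOD_TO_PARAMS above):
+
+    before: client.get_case('projects/p/cases/c-1')
+    after:  client.get_case(request={'name': 'projects/p/cases/c-1'})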
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-support/setup.py b/packages/google-cloud-support/setup.py new file mode 100644 index 000000000000..8bdb4d107009 --- /dev/null +++ b/packages/google-cloud-support/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-support" + + +description = "Google Cloud Support API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/support/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming 
Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-support/testing/.gitignore b/packages/google-cloud-support/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-support/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-support/testing/constraints-3.10.txt b/packages/google-cloud-support/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-support/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-support/testing/constraints-3.11.txt b/packages/google-cloud-support/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-support/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-support/testing/constraints-3.12.txt b/packages/google-cloud-support/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-support/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-support/testing/constraints-3.7.txt b/packages/google-cloud-support/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-support/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-support/testing/constraints-3.8.txt b/packages/google-cloud-support/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-support/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
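+# (Editor's note: constraints files like these are typically supplied via
+# pip's constraints flag, e.g. "pip install -e . -c testing/constraints-3.7.txt",
+# so the 3.7 file verifies that the lower bounds declared in setup.py still
+# resolve and install.)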
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-support/testing/constraints-3.9.txt b/packages/google-cloud-support/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-support/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-support/tests/__init__.py b/packages/google-cloud-support/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-support/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-support/tests/unit/__init__.py b/packages/google-cloud-support/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-support/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-support/tests/unit/gapic/__init__.py b/packages/google-cloud-support/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-support/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/__init__.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py new file mode 100644 index 000000000000..ff2206fc2bb1 --- /dev/null +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_attachment_service.py @@ -0,0 +1,2303 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.support_v2.services.case_attachment_service import ( + CaseAttachmentServiceAsyncClient, + CaseAttachmentServiceClient, + pagers, + transports, +) +from google.cloud.support_v2.types import attachment, attachment_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
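+# (Editor's note: e.g. a DEFAULT_ENDPOINT of "localhost:7469" would be swapped
+# for "foo.googleapis.com", giving the mTLS-endpoint derivation a realistic
+# hostname to rewrite; any non-localhost default is returned unchanged.)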
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CaseAttachmentServiceClient._get_default_mtls_endpoint(None) is None + assert ( + CaseAttachmentServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CaseAttachmentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CaseAttachmentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CaseAttachmentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CaseAttachmentServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CaseAttachmentServiceClient, "grpc"), + (CaseAttachmentServiceAsyncClient, "grpc_asyncio"), + (CaseAttachmentServiceClient, "rest"), + ], +) +def test_case_attachment_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsupport.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsupport.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CaseAttachmentServiceGrpcTransport, "grpc"), + (transports.CaseAttachmentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CaseAttachmentServiceRestTransport, "rest"), + ], +) +def test_case_attachment_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CaseAttachmentServiceClient, "grpc"), + (CaseAttachmentServiceAsyncClient, "grpc_asyncio"), + (CaseAttachmentServiceClient, "rest"), + ], +) +def test_case_attachment_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == 
creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsupport.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsupport.googleapis.com" + ) + + +def test_case_attachment_service_client_get_transport_class(): + transport = CaseAttachmentServiceClient.get_transport_class() + available_transports = [ + transports.CaseAttachmentServiceGrpcTransport, + transports.CaseAttachmentServiceRestTransport, + ] + assert transport in available_transports + + transport = CaseAttachmentServiceClient.get_transport_class("grpc") + assert transport == transports.CaseAttachmentServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceGrpcTransport, + "grpc", + ), + ( + CaseAttachmentServiceAsyncClient, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + CaseAttachmentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseAttachmentServiceClient), +) +@mock.patch.object( + CaseAttachmentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseAttachmentServiceAsyncClient), +) +def test_case_attachment_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CaseAttachmentServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CaseAttachmentServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
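+    # (Editor's summary of the resolution matrix exercised here and below:
+    #  "never" pins DEFAULT_ENDPOINT, "always" pins DEFAULT_MTLS_ENDPOINT, and
+    #  "auto" selects the mTLS endpoint only when a client certificate is
+    #  available; any other value raises MutualTLSChannelError.)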
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceGrpcTransport, + "grpc", + "true", + ), + ( + CaseAttachmentServiceAsyncClient, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceGrpcTransport, + "grpc", + "false", + ), + ( + CaseAttachmentServiceAsyncClient, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceRestTransport, + "rest", + "true", + ), + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + CaseAttachmentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseAttachmentServiceClient), +) +@mock.patch.object( + CaseAttachmentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseAttachmentServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_case_attachment_service_client_mtls_env_auto( + client_class, transport_class, transport_name, 
use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
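+    # (Editor's note: with neither an explicit client_cert_source nor an ADC
+    #  default certificate, the client stays on DEFAULT_ENDPOINT regardless of
+    #  GOOGLE_API_USE_CLIENT_CERTIFICATE, as asserted below.)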
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [CaseAttachmentServiceClient, CaseAttachmentServiceAsyncClient] +) +@mock.patch.object( + CaseAttachmentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseAttachmentServiceClient), +) +@mock.patch.object( + CaseAttachmentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseAttachmentServiceAsyncClient), +) +def test_case_attachment_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceGrpcTransport, + "grpc", + ), + ( + CaseAttachmentServiceAsyncClient, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceRestTransport, + "rest", + ), + ], +) +def test_case_attachment_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CaseAttachmentServiceAsyncClient, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceRestTransport, + "rest", + None, + ), + ], +) +def test_case_attachment_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
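+    # (Editor's sketch of the user-facing pattern this exercises, assuming a
+    #  credentials JSON file on disk:
+    #      client = CaseAttachmentServiceClient(
+    #          client_options={"credentials_file": "credentials.json"}))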
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_case_attachment_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.support_v2.services.case_attachment_service.transports.CaseAttachmentServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CaseAttachmentServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CaseAttachmentServiceClient, + transports.CaseAttachmentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CaseAttachmentServiceAsyncClient, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_case_attachment_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudsupport.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudsupport.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + attachment_service.ListAttachmentsRequest, + dict, + ], +) +def test_list_attachments(request_type, transport: str = "grpc"): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment_service.ListAttachmentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == attachment_service.ListAttachmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAttachmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_attachments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + client.list_attachments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == attachment_service.ListAttachmentsRequest() + + +@pytest.mark.asyncio +async def test_list_attachments_async( + transport: str = "grpc_asyncio", + request_type=attachment_service.ListAttachmentsRequest, +): + client = CaseAttachmentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. 
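+        # (Editor's note: grpc_helpers_async.FakeUnaryUnaryCall wraps the
+        #  response so that awaiting the mocked stub call yields it, mimicking
+        #  a real grpc.aio unary-unary invocation.)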
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment_service.ListAttachmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == attachment_service.ListAttachmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAttachmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_attachments_async_from_dict(): + await test_list_attachments_async(request_type=dict) + + +def test_list_attachments_field_headers(): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = attachment_service.ListAttachmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + call.return_value = attachment_service.ListAttachmentsResponse() + client.list_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_attachments_field_headers_async(): + client = CaseAttachmentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = attachment_service.ListAttachmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment_service.ListAttachmentsResponse() + ) + await client.list_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_attachments_flattened(): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment_service.ListAttachmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_attachments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].parent
+    mock_val = "parent_value"
+    assert arg == mock_val
+
+
+def test_list_attachments_flattened_error():
+    client = CaseAttachmentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_attachments(
+            attachment_service.ListAttachmentsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_attachments_flattened_async():
+    client = CaseAttachmentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_attachments), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            attachment_service.ListAttachmentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_attachments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_attachments_flattened_error_async():
+    client = CaseAttachmentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_attachments(
+            attachment_service.ListAttachmentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_attachments_pager(transport_name: str = "grpc"):
+    client = CaseAttachmentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_attachments), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                ],
+                next_page_token="abc",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[],
+                next_page_token="def",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                ],
+                next_page_token="ghi",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_attachments(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, attachment.Attachment) for i in results)
+
+
+def test_list_attachments_pages(transport_name: str = "grpc"):
+    client = CaseAttachmentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
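+    # (Editor's note on the pager surface: callers usually iterate items, e.g.
+    #      for att in client.list_attachments(request={}): ...
+    #  or walk pager.pages as this test does to inspect each page's
+    #  raw_page.next_page_token.)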
+    with mock.patch.object(type(client.transport.list_attachments), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                ],
+                next_page_token="abc",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[],
+                next_page_token="def",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                ],
+                next_page_token="ghi",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_attachments(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_attachments_async_pager():
+    client = CaseAttachmentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_attachments), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                ],
+                next_page_token="abc",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[],
+                next_page_token="def",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                ],
+                next_page_token="ghi",
+            ),
+            attachment_service.ListAttachmentsResponse(
+                attachments=[
+                    attachment.Attachment(),
+                    attachment.Attachment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_attachments(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, attachment.Attachment) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_attachments_async_pages():
+    client = CaseAttachmentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_attachments), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + attachment_service.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + attachment.Attachment(), + ], + next_page_token="abc", + ), + attachment_service.ListAttachmentsResponse( + attachments=[], + next_page_token="def", + ), + attachment_service.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + ], + next_page_token="ghi", + ), + attachment_service.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_attachments(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + attachment_service.ListAttachmentsRequest, + dict, + ], +) +def test_list_attachments_rest(request_type): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/cases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = attachment_service.ListAttachmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = attachment_service.ListAttachmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attachments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAttachmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_attachments_rest_required_fields( + request_type=attachment_service.ListAttachmentsRequest, +): + transport_class = transports.CaseAttachmentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attachments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attachments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
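+    # (Editor's note: for this method the required "parent" travels in the
+    #  URI path, while the optional page_size/page_token become query
+    #  parameters, so only the latter may legitimately remain unset here.)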
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = attachment_service.ListAttachmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = attachment_service.ListAttachmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_attachments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_attachments_rest_unset_required_fields(): + transport = transports.CaseAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_attachments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_attachments_rest_interceptors(null_interceptor): + transport = transports.CaseAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseAttachmentServiceRestInterceptor(), + ) + client = CaseAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseAttachmentServiceRestInterceptor, "post_list_attachments" + ) as post, mock.patch.object( + transports.CaseAttachmentServiceRestInterceptor, "pre_list_attachments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = attachment_service.ListAttachmentsRequest.pb( + attachment_service.ListAttachmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = attachment_service.ListAttachmentsResponse.to_json( + attachment_service.ListAttachmentsResponse() + ) + + request = attachment_service.ListAttachmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = attachment_service.ListAttachmentsResponse() + + client.list_attachments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_attachments_rest_bad_request( + transport: str = "rest", request_type=attachment_service.ListAttachmentsRequest +): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/cases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_attachments(request) + + +def test_list_attachments_rest_flattened(): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = attachment_service.ListAttachmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/cases/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = attachment_service.ListAttachmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_attachments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/cases/*}/attachments" % client.transport._host, + args[1], + ) + + +def test_list_attachments_rest_flattened_error(transport: str = "rest"): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_attachments( + attachment_service.ListAttachmentsRequest(), + parent="parent_value", + ) + + +def test_list_attachments_rest_pager(transport: str = "rest"): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
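+        # (Editor's note: the page sequence below is built twice over because
+        #  this test drains the pager twice - once via list(pager) and once
+        #  via .pages - and each pass replays the full four-page series.)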
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + attachment_service.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + attachment.Attachment(), + ], + next_page_token="abc", + ), + attachment_service.ListAttachmentsResponse( + attachments=[], + next_page_token="def", + ), + attachment_service.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + ], + next_page_token="ghi", + ), + attachment_service.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + attachment_service.ListAttachmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/cases/sample2"} + + pager = client.list_attachments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, attachment.Attachment) for i in results) + + pages = list(client.list_attachments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CaseAttachmentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CaseAttachmentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CaseAttachmentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CaseAttachmentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CaseAttachmentServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CaseAttachmentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CaseAttachmentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CaseAttachmentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CaseAttachmentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CaseAttachmentServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CaseAttachmentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CaseAttachmentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseAttachmentServiceGrpcTransport, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + transports.CaseAttachmentServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CaseAttachmentServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CaseAttachmentServiceGrpcTransport, + ) + + +def test_case_attachment_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CaseAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_case_attachment_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.support_v2.services.case_attachment_service.transports.CaseAttachmentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CaseAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ("list_attachments",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_case_attachment_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.support_v2.services.case_attachment_service.transports.CaseAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CaseAttachmentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_case_attachment_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.support_v2.services.case_attachment_service.transports.CaseAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CaseAttachmentServiceTransport() + adc.assert_called_once() + + +def test_case_attachment_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CaseAttachmentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseAttachmentServiceGrpcTransport, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + ], +) +def test_case_attachment_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseAttachmentServiceGrpcTransport, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + transports.CaseAttachmentServiceRestTransport, + ], +) +def test_case_attachment_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CaseAttachmentServiceGrpcTransport, grpc_helpers), + (transports.CaseAttachmentServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_case_attachment_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudsupport.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudsupport.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseAttachmentServiceGrpcTransport, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + ], +) +def test_case_attachment_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_case_attachment_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.CaseAttachmentServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_case_attachment_service_host_no_port(transport_name):
+    client = CaseAttachmentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudsupport.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudsupport.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudsupport.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_case_attachment_service_host_with_port(transport_name):
+    client = CaseAttachmentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudsupport.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudsupport.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudsupport.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_case_attachment_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = CaseAttachmentServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = CaseAttachmentServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_attachments._session
+    session2 = client2.transport.list_attachments._session
+    assert session1 != session2
+
+
+def test_case_attachment_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CaseAttachmentServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_case_attachment_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CaseAttachmentServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.CaseAttachmentServiceGrpcTransport,
+        transports.CaseAttachmentServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_case_attachment_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseAttachmentServiceGrpcTransport, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + ], +) +def test_case_attachment_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_attachment_path(): + organization = "squid" + case = "clam" + attachment_id = "whelk" + expected = ( + "organizations/{organization}/cases/{case}/attachments/{attachment_id}".format( + organization=organization, + case=case, + attachment_id=attachment_id, + ) + ) + actual = CaseAttachmentServiceClient.attachment_path( + organization, case, attachment_id + ) + assert expected == actual + + +def test_parse_attachment_path(): + expected = { + "organization": "octopus", + "case": "oyster", + "attachment_id": "nudibranch", + } + path = CaseAttachmentServiceClient.attachment_path(**expected) + + # Check that the path construction is reversible. + actual = CaseAttachmentServiceClient.parse_attachment_path(path) + assert expected == actual + + +def test_case_path(): + organization = "cuttlefish" + case = "mussel" + expected = "organizations/{organization}/cases/{case}".format( + organization=organization, + case=case, + ) + actual = CaseAttachmentServiceClient.case_path(organization, case) + assert expected == actual + + +def test_parse_case_path(): + expected = { + "organization": "winkle", + "case": "nautilus", + } + path = CaseAttachmentServiceClient.case_path(**expected) + + # Check that the path construction is reversible. + actual = CaseAttachmentServiceClient.parse_case_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CaseAttachmentServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = CaseAttachmentServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CaseAttachmentServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CaseAttachmentServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = CaseAttachmentServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CaseAttachmentServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CaseAttachmentServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = CaseAttachmentServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CaseAttachmentServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = CaseAttachmentServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = CaseAttachmentServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CaseAttachmentServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CaseAttachmentServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = CaseAttachmentServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CaseAttachmentServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CaseAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CaseAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CaseAttachmentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CaseAttachmentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CaseAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
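+        # Entering the client as a context manager should not close the
+        # transport eagerly; only exiting the block should trigger close(),
+        # which is what the asserts below verify.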
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CaseAttachmentServiceClient, transports.CaseAttachmentServiceGrpcTransport), + ( + CaseAttachmentServiceAsyncClient, + transports.CaseAttachmentServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py new file mode 100644 index 000000000000..63d3ff158102 --- /dev/null +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_case_service.py @@ -0,0 +1,5813 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.support_v2.services.case_service import ( + CaseServiceAsyncClient, + CaseServiceClient, + pagers, + transports, +) +from google.cloud.support_v2.types import actor +from google.cloud.support_v2.types import case +from google.cloud.support_v2.types import case as gcs_case +from google.cloud.support_v2.types import case_service, escalation + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CaseServiceClient._get_default_mtls_endpoint(None) is None + assert ( + CaseServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CaseServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CaseServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CaseServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CaseServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CaseServiceClient, "grpc"), + (CaseServiceAsyncClient, "grpc_asyncio"), + (CaseServiceClient, "rest"), + ], +) +def test_case_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsupport.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://cloudsupport.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CaseServiceGrpcTransport, "grpc"), + (transports.CaseServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CaseServiceRestTransport, "rest"), + ], +) +def test_case_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CaseServiceClient, "grpc"), + (CaseServiceAsyncClient, "grpc_asyncio"), + (CaseServiceClient, "rest"), + ], +) +def test_case_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsupport.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsupport.googleapis.com" + ) + + +def test_case_service_client_get_transport_class(): + transport = CaseServiceClient.get_transport_class() + available_transports = [ + transports.CaseServiceGrpcTransport, + transports.CaseServiceRestTransport, + ] + assert transport in available_transports + + transport = CaseServiceClient.get_transport_class("grpc") + assert transport == transports.CaseServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CaseServiceClient, transports.CaseServiceGrpcTransport, "grpc"), + ( + CaseServiceAsyncClient, + transports.CaseServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CaseServiceClient, transports.CaseServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + CaseServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CaseServiceClient) +) +@mock.patch.object( + CaseServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseServiceAsyncClient), +) +def test_case_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CaseServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(CaseServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CaseServiceClient, transports.CaseServiceGrpcTransport, "grpc", "true"), + ( + CaseServiceAsyncClient, + transports.CaseServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CaseServiceClient, transports.CaseServiceGrpcTransport, "grpc", "false"), + ( + CaseServiceAsyncClient, + transports.CaseServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CaseServiceClient, transports.CaseServiceRestTransport, "rest", "true"), + (CaseServiceClient, transports.CaseServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CaseServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CaseServiceClient) +) +@mock.patch.object( + CaseServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_case_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
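+    # mock.patch.dict overrides the environment variable for the duration of
+    # each block and restores os.environ on exit, so the scenarios below
+    # cannot leak state into one another.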
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [CaseServiceClient, CaseServiceAsyncClient]) +@mock.patch.object( + CaseServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CaseServiceClient) +) +@mock.patch.object( + CaseServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CaseServiceAsyncClient), +) +def test_case_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CaseServiceClient, transports.CaseServiceGrpcTransport, "grpc"), + ( + CaseServiceAsyncClient, + transports.CaseServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CaseServiceClient, transports.CaseServiceRestTransport, "rest"), + ], +) +def test_case_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CaseServiceClient, transports.CaseServiceGrpcTransport, "grpc", grpc_helpers), + ( + CaseServiceAsyncClient, + transports.CaseServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CaseServiceClient, transports.CaseServiceRestTransport, "rest", None), + ], +) +def test_case_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_case_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.support_v2.services.case_service.transports.CaseServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CaseServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CaseServiceClient, transports.CaseServiceGrpcTransport, "grpc", grpc_helpers), + ( + CaseServiceAsyncClient, + transports.CaseServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_case_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
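+    # load_credentials_from_file is patched alongside ADC so the assertion
+    # can distinguish the file-based credentials from any ambient defaults.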
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudsupport.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudsupport.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.GetCaseRequest, + dict, + ], +) +def test_get_case(request_type, transport: str = "grpc"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + response = client.get_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.GetCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +def test_get_case_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_case), "__call__") as call: + client.get_case() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.GetCaseRequest() + + +@pytest.mark.asyncio +async def test_get_case_async( + transport: str = "grpc_asyncio", request_type=case_service.GetCaseRequest +): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + ) + response = await client.get_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.GetCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +@pytest.mark.asyncio +async def test_get_case_async_from_dict(): + await test_get_case_async(request_type=dict) + + +def test_get_case_field_headers(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.GetCaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_case), "__call__") as call: + call.return_value = case.Case() + client.get_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_case_field_headers_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
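+    # The client is expected to mirror URI path fields into the
+    # x-goog-request-params metadata entry, which the asserts below check.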
+ request = case_service.GetCaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_case), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(case.Case()) + await client.get_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_case_flattened(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = case.Case() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_case( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_case_flattened_error(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_case( + case_service.GetCaseRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_case_flattened_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = case.Case() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(case.Case()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_case( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_case_flattened_error_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_case( + case_service.GetCaseRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.ListCasesRequest, + dict, + ], +) +def test_list_cases(request_type, transport: str = "grpc"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_cases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = case_service.ListCasesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_cases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.ListCasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_cases), "__call__") as call: + client.list_cases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.ListCasesRequest() + + +@pytest.mark.asyncio +async def test_list_cases_async( + transport: str = "grpc_asyncio", request_type=case_service.ListCasesRequest +): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_cases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + case_service.ListCasesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_cases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.ListCasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCasesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_cases_async_from_dict(): + await test_list_cases_async(request_type=dict) + + +def test_list_cases_field_headers(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.ListCasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_cases), "__call__") as call: + call.return_value = case_service.ListCasesResponse() + client.list_cases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_cases_field_headers_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = case_service.ListCasesRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_cases), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            case_service.ListCasesResponse()
+        )
+        await client.list_cases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_cases_flattened():
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_cases), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = case_service.ListCasesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_cases(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_cases_flattened_error():
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_cases(
+            case_service.ListCasesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_cases_flattened_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_cases), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            case_service.ListCasesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_cases(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_cases_flattened_error_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_cases(
+            case_service.ListCasesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_cases_pager(transport_name: str = "grpc"):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_cases), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                    case.Case(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.ListCasesResponse(
+                cases=[],
+                next_page_token="def",
+            ),
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_cases(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, case.Case) for i in results)
+
+
+def test_list_cases_pages(transport_name: str = "grpc"):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_cases), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                    case.Case(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.ListCasesResponse(
+                cases=[],
+                next_page_token="def",
+            ),
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_cases(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_cases_async_pager():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_cases), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                    case.Case(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.ListCasesResponse(
+                cases=[],
+                next_page_token="def",
+            ),
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.ListCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_cases(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, case.Case) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_cases_async_pages():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
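+    # new_callable=mock.AsyncMock makes the patched __call__ awaitable, which
+    # the async pager needs in order to fetch each page.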
+ with mock.patch.object( + type(client.transport.list_cases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + case_service.ListCasesResponse( + cases=[ + case.Case(), + case.Case(), + case.Case(), + ], + next_page_token="abc", + ), + case_service.ListCasesResponse( + cases=[], + next_page_token="def", + ), + case_service.ListCasesResponse( + cases=[ + case.Case(), + ], + next_page_token="ghi", + ), + case_service.ListCasesResponse( + cases=[ + case.Case(), + case.Case(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_cases(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.SearchCasesRequest, + dict, + ], +) +def test_search_cases(request_type, transport: str = "grpc"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_cases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = case_service.SearchCasesResponse( + next_page_token="next_page_token_value", + ) + response = client.search_cases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.SearchCasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_search_cases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_cases), "__call__") as call: + client.search_cases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.SearchCasesRequest() + + +@pytest.mark.asyncio +async def test_search_cases_async( + transport: str = "grpc_asyncio", request_type=case_service.SearchCasesRequest +): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_cases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + case_service.SearchCasesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.search_cases(request) + + # Establish that the underlying gRPC stub method was called. 
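+        # Only a truthy call count is asserted here; the async mock may record
+        # auxiliary entries beyond the single stub invocation.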
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == case_service.SearchCasesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.SearchCasesAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_search_cases_async_from_dict():
+    await test_search_cases_async(request_type=dict)
+
+
+def test_search_cases_field_headers():
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = case_service.SearchCasesRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.search_cases), "__call__") as call:
+        call.return_value = case_service.SearchCasesResponse()
+        client.search_cases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_search_cases_field_headers_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = case_service.SearchCasesRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.search_cases), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            case_service.SearchCasesResponse()
+        )
+        await client.search_cases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_search_cases_pager(transport_name: str = "grpc"):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.search_cases), "__call__") as call:
+        # Set the response to a series of pages.
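+        # side_effect yields one response per invocation, so each page fetch
+        # consumes the next element; the trailing RuntimeError would surface
+        # if the pager ever requested a page past the last one.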
+        call.side_effect = (
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                    case.Case(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[],
+                next_page_token="def",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.search_cases(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, case.Case) for i in results)
+
+
+def test_search_cases_pages(transport_name: str = "grpc"):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.search_cases), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                    case.Case(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[],
+                next_page_token="def",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.search_cases(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_search_cases_async_pager():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_cases), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                    case.Case(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[],
+                next_page_token="def",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.SearchCasesResponse(
+                cases=[
+                    case.Case(),
+                    case.Case(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.search_cases(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, case.Case) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_search_cases_async_pages():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_cases), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + case_service.SearchCasesResponse( + cases=[ + case.Case(), + case.Case(), + case.Case(), + ], + next_page_token="abc", + ), + case_service.SearchCasesResponse( + cases=[], + next_page_token="def", + ), + case_service.SearchCasesResponse( + cases=[ + case.Case(), + ], + next_page_token="ghi", + ), + case_service.SearchCasesResponse( + cases=[ + case.Case(), + case.Case(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.search_cases(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.CreateCaseRequest, + dict, + ], +) +def test_create_case(request_type, transport: str = "grpc"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=gcs_case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=gcs_case.Case.Priority.P0, + ) + response = client.create_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.CreateCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == gcs_case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == gcs_case.Case.Priority.P0 + + +def test_create_case_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_case), "__call__") as call: + client.create_case() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.CreateCaseRequest() + + +@pytest.mark.asyncio +async def test_create_case_async( + transport: str = "grpc_asyncio", request_type=case_service.CreateCaseRequest +): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcs_case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=gcs_case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=gcs_case.Case.Priority.P0, + ) + ) + response = await client.create_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.CreateCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == gcs_case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == gcs_case.Case.Priority.P0 + + +@pytest.mark.asyncio +async def test_create_case_async_from_dict(): + await test_create_case_async(request_type=dict) + + +def test_create_case_field_headers(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.CreateCaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_case), "__call__") as call: + call.return_value = gcs_case.Case() + client.create_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_case_field_headers_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = case_service.CreateCaseRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_case), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_case.Case())
+        await client.create_case(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_case_flattened():
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_case), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcs_case.Case()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_case(
+            parent="parent_value",
+            case=gcs_case.Case(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].case
+        mock_val = gcs_case.Case(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_case_flattened_error():
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_case(
+            case_service.CreateCaseRequest(),
+            parent="parent_value",
+            case=gcs_case.Case(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_case_flattened_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_case), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_case.Case())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_case(
+            parent="parent_value",
+            case=gcs_case.Case(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].case
+        mock_val = gcs_case.Case(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_case_flattened_error_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_case( + case_service.CreateCaseRequest(), + parent="parent_value", + case=gcs_case.Case(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.UpdateCaseRequest, + dict, + ], +) +def test_update_case(request_type, transport: str = "grpc"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=gcs_case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=gcs_case.Case.Priority.P0, + ) + response = client.update_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.UpdateCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == gcs_case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == gcs_case.Case.Priority.P0 + + +def test_update_case_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_case), "__call__") as call: + client.update_case() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.UpdateCaseRequest() + + +@pytest.mark.asyncio +async def test_update_case_async( + transport: str = "grpc_asyncio", request_type=case_service.UpdateCaseRequest +): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_case), "__call__") as call: + # Designate an appropriate return value for the call. 
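+        # FakeUnaryUnaryCall wraps the response so that awaiting the mocked
+        # stub behaves like a real async unary-unary gRPC call.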
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcs_case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=gcs_case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=gcs_case.Case.Priority.P0, + ) + ) + response = await client.update_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.UpdateCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == gcs_case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == gcs_case.Case.Priority.P0 + + +@pytest.mark.asyncio +async def test_update_case_async_from_dict(): + await test_update_case_async(request_type=dict) + + +def test_update_case_field_headers(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.UpdateCaseRequest() + + request.case.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_case), "__call__") as call: + call.return_value = gcs_case.Case() + client.update_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "case.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_case_field_headers_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.UpdateCaseRequest() + + request.case.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_case), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_case.Case()) + await client.update_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "case.name=name_value", + ) in kw["metadata"] + + +def test_update_case_flattened(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
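+    # The flattened keyword arguments below should be folded by the client
+    # into a single UpdateCaseRequest before the stub is invoked.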
+    with mock.patch.object(type(client.transport.update_case), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcs_case.Case()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_case(
+            case=gcs_case.Case(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].case
+        mock_val = gcs_case.Case(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_case_flattened_error():
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_case(
+            case_service.UpdateCaseRequest(),
+            case=gcs_case.Case(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_case_flattened_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_case), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_case.Case())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_case(
+            case=gcs_case.Case(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].case
+        mock_val = gcs_case.Case(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_case_flattened_error_async():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_case(
+            case_service.UpdateCaseRequest(),
+            case=gcs_case.Case(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        case_service.EscalateCaseRequest,
+        dict,
+    ],
+)
+def test_escalate_case(request_type, transport: str = "grpc"):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.escalate_case), "__call__") as call:
+        # Designate an appropriate return value for the call.
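+        # escalate_case returns the read variant (case.Case); the gcs_case
+        # alias used by the create/update tests appears to be the generator's
+        # second import of the same Case message for mutating RPCs.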
+ call.return_value = case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + response = client.escalate_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.EscalateCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +def test_escalate_case_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.escalate_case), "__call__") as call: + client.escalate_case() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.EscalateCaseRequest() + + +@pytest.mark.asyncio +async def test_escalate_case_async( + transport: str = "grpc_asyncio", request_type=case_service.EscalateCaseRequest +): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.escalate_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + ) + response = await client.escalate_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.EscalateCaseRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +@pytest.mark.asyncio +async def test_escalate_case_async_from_dict(): + await test_escalate_case_async(request_type=dict) + + +def test_escalate_case_field_headers(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.EscalateCaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.escalate_case), "__call__") as call: + call.return_value = case.Case() + client.escalate_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_escalate_case_field_headers_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.EscalateCaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.escalate_case), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(case.Case()) + await client.escalate_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.CloseCaseRequest, + dict, + ], +) +def test_close_case(request_type, transport: str = "grpc"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.close_case), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + response = client.close_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.CloseCaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +def test_close_case_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.close_case), "__call__") as call: + client.close_case() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.CloseCaseRequest() + + +@pytest.mark.asyncio +async def test_close_case_async( + transport: str = "grpc_asyncio", request_type=case_service.CloseCaseRequest +): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.close_case), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + ) + response = await client.close_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == case_service.CloseCaseRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +@pytest.mark.asyncio +async def test_close_case_async_from_dict(): + await test_close_case_async(request_type=dict) + + +def test_close_case_field_headers(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.CloseCaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.close_case), "__call__") as call: + call.return_value = case.Case() + client.close_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_close_case_field_headers_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = case_service.CloseCaseRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.close_case), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(case.Case()) + await client.close_case(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.SearchCaseClassificationsRequest, + dict, + ], +) +def test_search_case_classifications(request_type, transport: str = "grpc"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_case_classifications), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = case_service.SearchCaseClassificationsResponse( + next_page_token="next_page_token_value", + ) + response = client.search_case_classifications(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == case_service.SearchCaseClassificationsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.SearchCaseClassificationsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_search_case_classifications_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_case_classifications), "__call__"
+    ) as call:
+        client.search_case_classifications()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == case_service.SearchCaseClassificationsRequest()
+
+
+@pytest.mark.asyncio
+async def test_search_case_classifications_async(
+    transport: str = "grpc_asyncio",
+    request_type=case_service.SearchCaseClassificationsRequest,
+):
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_case_classifications), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            case_service.SearchCaseClassificationsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.search_case_classifications(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == case_service.SearchCaseClassificationsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.SearchCaseClassificationsAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_search_case_classifications_async_from_dict():
+    await test_search_case_classifications_async(request_type=dict)
+
+
+def test_search_case_classifications_pager(transport_name: str = "grpc"):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_case_classifications), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
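+        # Note: unlike the case pagers above, this test expects no routing
+        # header (the metadata below stays ()), since
+        # SearchCaseClassificationsRequest carries no URI-bound parent field.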
+        call.side_effect = (
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[
+                    case.CaseClassification(),
+                    case.CaseClassification(),
+                    case.CaseClassification(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[],
+                next_page_token="def",
+            ),
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[
+                    case.CaseClassification(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[
+                    case.CaseClassification(),
+                    case.CaseClassification(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        pager = client.search_case_classifications(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, case.CaseClassification) for i in results)
+
+
+def test_search_case_classifications_pages(transport_name: str = "grpc"):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_case_classifications), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[
+                    case.CaseClassification(),
+                    case.CaseClassification(),
+                    case.CaseClassification(),
+                ],
+                next_page_token="abc",
+            ),
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[],
+                next_page_token="def",
+            ),
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[
+                    case.CaseClassification(),
+                ],
+                next_page_token="ghi",
+            ),
+            case_service.SearchCaseClassificationsResponse(
+                case_classifications=[
+                    case.CaseClassification(),
+                    case.CaseClassification(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.search_case_classifications(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_search_case_classifications_async_pager():
+    client = CaseServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_case_classifications),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = (
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[
+ case.CaseClassification(),
+ case.CaseClassification(),
+ case.CaseClassification(),
+ ],
+ next_page_token="abc",
+ ),
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[],
+ next_page_token="def",
+ ),
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[
+ case.CaseClassification(),
+ ],
+ next_page_token="ghi",
+ ),
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[
+ case.CaseClassification(),
+ case.CaseClassification(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.search_case_classifications(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, case.CaseClassification) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_search_case_classifications_async_pages():
+ client = CaseServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_case_classifications),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[
+ case.CaseClassification(),
+ case.CaseClassification(),
+ case.CaseClassification(),
+ ],
+ next_page_token="abc",
+ ),
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[],
+ next_page_token="def",
+ ),
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[
+ case.CaseClassification(),
+ ],
+ next_page_token="ghi",
+ ),
+ case_service.SearchCaseClassificationsResponse(
+ case_classifications=[
+ case.CaseClassification(),
+ case.CaseClassification(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (
+ await client.search_case_classifications(request={})
+ ).pages: # pragma: no branch
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ case_service.GetCaseRequest,
+ dict,
+ ],
+)
+def test_get_case_rest(request_type):
+ client = CaseServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"name": "projects/sample1/cases/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
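+ # Annotation: the REST tests fake a wire response rather than a stub
+ # return value. The proto built below is serialized with
+ # json_format.MessageToJson and stuffed into a requests.Response body,
+ # which the transport then parses back into a Case exactly as it would
+ # for a real HTTP reply.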
+ return_value = case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_case(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +def test_get_case_rest_required_fields(request_type=case_service.GetCaseRequest): + transport_class = transports.CaseServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = case.Case() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_case(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_case_rest_unset_required_fields(): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_case._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_case_rest_interceptors(null_interceptor): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseServiceRestInterceptor(), + ) + client = CaseServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseServiceRestInterceptor, "post_get_case" + ) as post, mock.patch.object( + transports.CaseServiceRestInterceptor, "pre_get_case" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = case_service.GetCaseRequest.pb(case_service.GetCaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = case.Case.to_json(case.Case()) + + request = case_service.GetCaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = case.Case() + + client.get_case( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_case_rest_bad_request( + transport: str = "rest", request_type=case_service.GetCaseRequest +): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/cases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_case(request) + + +def test_get_case_rest_flattened(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = case.Case() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/cases/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_case(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/cases/*}" % client.transport._host, args[1] + ) + + +def test_get_case_rest_flattened_error(transport: str = "rest"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_case( + case_service.GetCaseRequest(), + name="name_value", + ) + + +def test_get_case_rest_error(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.ListCasesRequest, + dict, + ], +) +def test_list_cases_rest(request_type): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = case_service.ListCasesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case_service.ListCasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_cases(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListCasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cases_rest_required_fields(request_type=case_service.ListCasesRequest): + transport_class = transports.CaseServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = case_service.ListCasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = case_service.ListCasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_cases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_cases_rest_unset_required_fields(): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_cases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cases_rest_interceptors(null_interceptor): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseServiceRestInterceptor(), + ) + client = CaseServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseServiceRestInterceptor, "post_list_cases" + ) as post, mock.patch.object( + transports.CaseServiceRestInterceptor, "pre_list_cases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = case_service.ListCasesRequest.pb(case_service.ListCasesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = case_service.ListCasesResponse.to_json( + case_service.ListCasesResponse() + ) + + request = case_service.ListCasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = case_service.ListCasesResponse() + + client.list_cases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cases_rest_bad_request( + transport: str = "rest", request_type=case_service.ListCasesRequest +): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
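+ # Annotation: a mocked 400 response is enough here; the REST transport
+ # converts non-2xx replies into google.api_core exceptions (a 400
+ # surfaces as BadRequest), so pytest.raises can assert on the mapped
+ # exception without a real server.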
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cases(request) + + +def test_list_cases_rest_flattened(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = case_service.ListCasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case_service.ListCasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_cases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/cases" % client.transport._host, args[1] + ) + + +def test_list_cases_rest_flattened_error(transport: str = "rest"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_cases( + case_service.ListCasesRequest(), + parent="parent_value", + ) + + +def test_list_cases_rest_pager(transport: str = "rest"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
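+ # Annotation: the commented-out transcode mock kept below is generator
+ # residue (see the TODO above). REST paging is driven through
+ # req.side_effect with fully serialized Response objects; the four pages
+ # are doubled further down because the pager is consumed twice, once via
+ # list(pager) and once via .pages.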
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + case_service.ListCasesResponse( + cases=[ + case.Case(), + case.Case(), + case.Case(), + ], + next_page_token="abc", + ), + case_service.ListCasesResponse( + cases=[], + next_page_token="def", + ), + case_service.ListCasesResponse( + cases=[ + case.Case(), + ], + next_page_token="ghi", + ), + case_service.ListCasesResponse( + cases=[ + case.Case(), + case.Case(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(case_service.ListCasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_cases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, case.Case) for i in results) + + pages = list(client.list_cases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.SearchCasesRequest, + dict, + ], +) +def test_search_cases_rest(request_type): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = case_service.SearchCasesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case_service.SearchCasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_cases(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchCasesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_cases_rest_interceptors(null_interceptor): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseServiceRestInterceptor(), + ) + client = CaseServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseServiceRestInterceptor, "post_search_cases" + ) as post, mock.patch.object( + transports.CaseServiceRestInterceptor, "pre_search_cases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = case_service.SearchCasesRequest.pb( + case_service.SearchCasesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = case_service.SearchCasesResponse.to_json( + case_service.SearchCasesResponse() + ) + + request = case_service.SearchCasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = case_service.SearchCasesResponse() + + client.search_cases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_cases_rest_bad_request( + transport: str = "rest", request_type=case_service.SearchCasesRequest +): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_cases(request) + + +def test_search_cases_rest_pager(transport: str = "rest"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + case_service.SearchCasesResponse( + cases=[ + case.Case(), + case.Case(), + case.Case(), + ], + next_page_token="abc", + ), + case_service.SearchCasesResponse( + cases=[], + next_page_token="def", + ), + case_service.SearchCasesResponse( + cases=[ + case.Case(), + ], + next_page_token="ghi", + ), + case_service.SearchCasesResponse( + cases=[ + case.Case(), + case.Case(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(case_service.SearchCasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.search_cases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, case.Case) for i in results) + + pages = list(client.search_cases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.CreateCaseRequest, + dict, + ], +) +def test_create_case_rest(request_type): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["case"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "classification": {"id": "id_value", "display_name": "display_name_value"}, + "time_zone": "time_zone_value", + "subscriber_email_addresses": [ + "subscriber_email_addresses_value1", + "subscriber_email_addresses_value2", + ], + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "creator": { + "display_name": "display_name_value", + "email": "email_value", + "google_support": True, + }, + "contact_email": "contact_email_value", + "escalated": True, + "test_case": True, + "language_code": "language_code_value", + "priority": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=gcs_case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=gcs_case.Case.Priority.P0, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_case(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcs_case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == gcs_case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == gcs_case.Case.Priority.P0 + + +def test_create_case_rest_required_fields(request_type=case_service.CreateCaseRequest): + transport_class = transports.CaseServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcs_case.Case() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
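+ # Annotation: create_case is transcoded as an HTTP POST, so unlike the
+ # GET-style fixtures above the canned transcode result below also
+ # carries a "body" entry alongside the query_params.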
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcs_case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_case(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_case_rest_unset_required_fields(): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_case._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "case", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_case_rest_interceptors(null_interceptor): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseServiceRestInterceptor(), + ) + client = CaseServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseServiceRestInterceptor, "post_create_case" + ) as post, mock.patch.object( + transports.CaseServiceRestInterceptor, "pre_create_case" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = case_service.CreateCaseRequest.pb(case_service.CreateCaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcs_case.Case.to_json(gcs_case.Case()) + + request = case_service.CreateCaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcs_case.Case() + + client.create_case( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_case_rest_bad_request( + transport: str = "rest", request_type=case_service.CreateCaseRequest +): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["case"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "classification": {"id": "id_value", "display_name": "display_name_value"}, + "time_zone": "time_zone_value", + "subscriber_email_addresses": [ + "subscriber_email_addresses_value1", + "subscriber_email_addresses_value2", + ], + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "creator": { + "display_name": "display_name_value", + "email": "email_value", + "google_support": True, + }, + "contact_email": "contact_email_value", + "escalated": True, + "test_case": True, + "language_code": "language_code_value", + "priority": 1, + } 
+ request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_case(request) + + +def test_create_case_rest_flattened(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_case.Case() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + case=gcs_case.Case(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_case(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/cases" % client.transport._host, args[1] + ) + + +def test_create_case_rest_flattened_error(transport: str = "rest"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_case( + case_service.CreateCaseRequest(), + parent="parent_value", + case=gcs_case.Case(name="name_value"), + ) + + +def test_create_case_rest_error(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.UpdateCaseRequest, + dict, + ], +) +def test_update_case_rest(request_type): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"case": {"name": "projects/sample1/cases/sample2"}} + request_init["case"] = { + "name": "projects/sample1/cases/sample2", + "display_name": "display_name_value", + "description": "description_value", + "classification": {"id": "id_value", "display_name": "display_name_value"}, + "time_zone": "time_zone_value", + "subscriber_email_addresses": [ + "subscriber_email_addresses_value1", + "subscriber_email_addresses_value2", + ], + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "creator": { + "display_name": "display_name_value", + "email": "email_value", + "google_support": True, + }, + "contact_email": "contact_email_value", + "escalated": True, + "test_case": True, + "language_code": "language_code_value", + "priority": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
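+ # Annotation: patching type(client.transport._session) rather than the
+ # instance swaps "request" on the session's class, so (presumably) the
+ # bound-method lookup on the live requests.Session object resolves to
+ # the mock regardless of how the transport holds that session.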
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=gcs_case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=gcs_case.Case.Priority.P0, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_case(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == gcs_case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == gcs_case.Case.Priority.P0 + + +def test_update_case_rest_required_fields(request_type=case_service.UpdateCaseRequest): + transport_class = transports.CaseServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_case._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcs_case.Case() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcs_case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_case(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_case_rest_unset_required_fields(): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_case._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("case",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_case_rest_interceptors(null_interceptor): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseServiceRestInterceptor(), + ) + client = CaseServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseServiceRestInterceptor, "post_update_case" + ) as post, mock.patch.object( + transports.CaseServiceRestInterceptor, "pre_update_case" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = case_service.UpdateCaseRequest.pb(case_service.UpdateCaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcs_case.Case.to_json(gcs_case.Case()) + + request = case_service.UpdateCaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcs_case.Case() + + client.update_case( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_case_rest_bad_request( + transport: str = "rest", request_type=case_service.UpdateCaseRequest +): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"case": {"name": "projects/sample1/cases/sample2"}} + request_init["case"] = { + "name": "projects/sample1/cases/sample2", + "display_name": "display_name_value", + "description": "description_value", + "classification": {"id": "id_value", "display_name": "display_name_value"}, + "time_zone": "time_zone_value", + "subscriber_email_addresses": [ + "subscriber_email_addresses_value1", + "subscriber_email_addresses_value2", + ], + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "creator": { + "display_name": "display_name_value", + "email": "email_value", + "google_support": True, + }, + "contact_email": "contact_email_value", + "escalated": True, + "test_case": True, + "language_code": 
"language_code_value", + "priority": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_case(request) + + +def test_update_case_rest_flattened(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_case.Case() + + # get arguments that satisfy an http rule for this method + sample_request = {"case": {"name": "projects/sample1/cases/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + case=gcs_case.Case(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_case(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{case.name=projects/*/cases/*}" % client.transport._host, args[1] + ) + + +def test_update_case_rest_flattened_error(transport: str = "rest"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_case( + case_service.UpdateCaseRequest(), + case=gcs_case.Case(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_case_rest_error(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.EscalateCaseRequest, + dict, + ], +) +def test_escalate_case_rest(request_type): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/cases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.escalate_case(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +def test_escalate_case_rest_required_fields( + request_type=case_service.EscalateCaseRequest, +): + transport_class = transports.CaseServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).escalate_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).escalate_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = case.Case() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.escalate_case(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_escalate_case_rest_unset_required_fields(): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.escalate_case._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_escalate_case_rest_interceptors(null_interceptor): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseServiceRestInterceptor(), + ) + client = CaseServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseServiceRestInterceptor, "post_escalate_case" + ) as post, mock.patch.object( + transports.CaseServiceRestInterceptor, "pre_escalate_case" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = case_service.EscalateCaseRequest.pb( + case_service.EscalateCaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = case.Case.to_json(case.Case()) + + request = case_service.EscalateCaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = case.Case() + + client.escalate_case( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_escalate_case_rest_bad_request( + transport: str = "rest", request_type=case_service.EscalateCaseRequest +): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/cases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.escalate_case(request) + + +def test_escalate_case_rest_error(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.CloseCaseRequest, + dict, + ], +) +def test_close_case_rest(request_type): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/cases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = case.Case( + name="name_value", + display_name="display_name_value", + description="description_value", + time_zone="time_zone_value", + subscriber_email_addresses=["subscriber_email_addresses_value"], + state=case.Case.State.NEW, + contact_email="contact_email_value", + escalated=True, + test_case=True, + language_code="language_code_value", + priority=case.Case.Priority.P0, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case.Case.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.close_case(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, case.Case) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.time_zone == "time_zone_value" + assert response.subscriber_email_addresses == ["subscriber_email_addresses_value"] + assert response.state == case.Case.State.NEW + assert response.contact_email == "contact_email_value" + assert response.escalated is True + assert response.test_case is True + assert response.language_code == "language_code_value" + assert response.priority == case.Case.Priority.P0 + + +def test_close_case_rest_required_fields(request_type=case_service.CloseCaseRequest): + transport_class = transports.CaseServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).close_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).close_case._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = case.Case() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = case.Case.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.close_case(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_close_case_rest_unset_required_fields():
+    transport = transports.CaseServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.close_case._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_close_case_rest_interceptors(null_interceptor):
+    transport = transports.CaseServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CaseServiceRestInterceptor(),
+    )
+    client = CaseServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CaseServiceRestInterceptor, "post_close_case"
+    ) as post, mock.patch.object(
+        transports.CaseServiceRestInterceptor, "pre_close_case"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = case_service.CloseCaseRequest.pb(case_service.CloseCaseRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = case.Case.to_json(case.Case())
+
+        request = case_service.CloseCaseRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = case.Case()
+
+        client.close_case(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_close_case_rest_bad_request(
+    transport: str = "rest", request_type=case_service.CloseCaseRequest
+):
+    client = CaseServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/cases/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
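+    # A 400 status on the mocked session should surface to the caller as
+    # core_exceptions.BadRequest.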
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.close_case(request) + + +def test_close_case_rest_error(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + case_service.SearchCaseClassificationsRequest, + dict, + ], +) +def test_search_case_classifications_rest(request_type): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = case_service.SearchCaseClassificationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = case_service.SearchCaseClassificationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_case_classifications(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCaseClassificationsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_case_classifications_rest_interceptors(null_interceptor): + transport = transports.CaseServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CaseServiceRestInterceptor(), + ) + client = CaseServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CaseServiceRestInterceptor, "post_search_case_classifications" + ) as post, mock.patch.object( + transports.CaseServiceRestInterceptor, "pre_search_case_classifications" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = case_service.SearchCaseClassificationsRequest.pb( + case_service.SearchCaseClassificationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + case_service.SearchCaseClassificationsResponse.to_json( + case_service.SearchCaseClassificationsResponse() + ) + ) + + request = case_service.SearchCaseClassificationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = case_service.SearchCaseClassificationsResponse() + + client.search_case_classifications( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def 
test_search_case_classifications_rest_bad_request( + transport: str = "rest", request_type=case_service.SearchCaseClassificationsRequest +): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_case_classifications(request) + + +def test_search_case_classifications_rest_pager(transport: str = "rest"): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + case_service.SearchCaseClassificationsResponse( + case_classifications=[ + case.CaseClassification(), + case.CaseClassification(), + case.CaseClassification(), + ], + next_page_token="abc", + ), + case_service.SearchCaseClassificationsResponse( + case_classifications=[], + next_page_token="def", + ), + case_service.SearchCaseClassificationsResponse( + case_classifications=[ + case.CaseClassification(), + ], + next_page_token="ghi", + ), + case_service.SearchCaseClassificationsResponse( + case_classifications=[ + case.CaseClassification(), + case.CaseClassification(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + case_service.SearchCaseClassificationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.search_case_classifications(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, case.CaseClassification) for i in results) + + pages = list(client.search_case_classifications(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CaseServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CaseServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CaseServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
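+    # (api_key is mutually exclusive with an explicit transport for the same
+    # reason credentials are: the transport already carries its own.)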
+ transport = transports.CaseServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CaseServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CaseServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CaseServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CaseServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CaseServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CaseServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CaseServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CaseServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseServiceGrpcTransport, + transports.CaseServiceGrpcAsyncIOTransport, + transports.CaseServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CaseServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CaseServiceGrpcTransport, + ) + + +def test_case_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CaseServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_case_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.support_v2.services.case_service.transports.CaseServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CaseServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
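+    # The base class is abstract; concrete transports override each stub.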
+ methods = ( + "get_case", + "list_cases", + "search_cases", + "create_case", + "update_case", + "escalate_case", + "close_case", + "search_case_classifications", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_case_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.support_v2.services.case_service.transports.CaseServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CaseServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_case_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.support_v2.services.case_service.transports.CaseServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CaseServiceTransport() + adc.assert_called_once() + + +def test_case_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CaseServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseServiceGrpcTransport, + transports.CaseServiceGrpcAsyncIOTransport, + ], +) +def test_case_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
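+    # google.auth.default() is patched, so no real ADC lookup happens here.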
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CaseServiceGrpcTransport, + transports.CaseServiceGrpcAsyncIOTransport, + transports.CaseServiceRestTransport, + ], +) +def test_case_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CaseServiceGrpcTransport, grpc_helpers), + (transports.CaseServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_case_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudsupport.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudsupport.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.CaseServiceGrpcTransport, transports.CaseServiceGrpcAsyncIOTransport], +) +def test_case_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
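+    # grpc.ssl_channel_credentials should be fed the cert/key pair produced
+    # by the callback.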
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_case_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CaseServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_case_service_host_no_port(transport_name): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudsupport.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudsupport.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsupport.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_case_service_host_with_port(transport_name): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudsupport.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudsupport.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsupport.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_case_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CaseServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CaseServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_case._session + session2 = client2.transport.get_case._session + assert session1 != session2 + session1 = client1.transport.list_cases._session + session2 = client2.transport.list_cases._session + assert session1 != session2 + session1 = client1.transport.search_cases._session + session2 = client2.transport.search_cases._session + assert session1 != session2 + session1 = client1.transport.create_case._session + session2 = client2.transport.create_case._session + assert session1 != session2 + session1 = client1.transport.update_case._session + session2 = client2.transport.update_case._session + assert session1 != session2 + session1 = client1.transport.escalate_case._session + session2 = client2.transport.escalate_case._session + assert session1 != session2 + session1 = client1.transport.close_case._session + session2 = client2.transport.close_case._session + assert session1 != session2 + session1 = client1.transport.search_case_classifications._session + session2 = client2.transport.search_case_classifications._session + assert session1 != session2 + + +def test_case_service_grpc_transport_channel(): + channel = 
grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CaseServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_case_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CaseServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.CaseServiceGrpcTransport, transports.CaseServiceGrpcAsyncIOTransport],
+)
+def test_case_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
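+# (This variant exercises the same deprecated path, but sources the mTLS
+# credentials from ADC instead of an explicit callback.)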
+@pytest.mark.parametrize( + "transport_class", + [transports.CaseServiceGrpcTransport, transports.CaseServiceGrpcAsyncIOTransport], +) +def test_case_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_case_path(): + organization = "squid" + case = "clam" + expected = "organizations/{organization}/cases/{case}".format( + organization=organization, + case=case, + ) + actual = CaseServiceClient.case_path(organization, case) + assert expected == actual + + +def test_parse_case_path(): + expected = { + "organization": "whelk", + "case": "octopus", + } + path = CaseServiceClient.case_path(**expected) + + # Check that the path construction is reversible. + actual = CaseServiceClient.parse_case_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CaseServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = CaseServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CaseServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CaseServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = CaseServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CaseServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CaseServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = CaseServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CaseServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = CaseServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = CaseServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CaseServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CaseServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = CaseServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CaseServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CaseServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CaseServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CaseServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CaseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CaseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
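+        # Exiting the context manager must close the transport exactly once.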
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CaseServiceClient, transports.CaseServiceGrpcTransport), + (CaseServiceAsyncClient, transports.CaseServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py new file mode 100644 index 000000000000..f1485f362fc4 --- /dev/null +++ b/packages/google-cloud-support/tests/unit/gapic/support_v2/test_comment_service.py @@ -0,0 +1,2801 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.support_v2.services.comment_service import ( + CommentServiceAsyncClient, + CommentServiceClient, + pagers, + transports, +) +from google.cloud.support_v2.types import actor +from google.cloud.support_v2.types import comment +from google.cloud.support_v2.types import comment as gcs_comment +from google.cloud.support_v2.types import comment_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CommentServiceClient._get_default_mtls_endpoint(None) is None + assert ( + CommentServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CommentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CommentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CommentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CommentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CommentServiceClient, "grpc"), + (CommentServiceAsyncClient, "grpc_asyncio"), + (CommentServiceClient, "rest"), + ], +) +def test_comment_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsupport.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://cloudsupport.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CommentServiceGrpcTransport, "grpc"), + (transports.CommentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CommentServiceRestTransport, "rest"), + ], +) +def test_comment_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CommentServiceClient, "grpc"), + (CommentServiceAsyncClient, "grpc_asyncio"), + (CommentServiceClient, "rest"), + ], +) +def test_comment_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsupport.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsupport.googleapis.com" + ) + + +def test_comment_service_client_get_transport_class(): + transport = CommentServiceClient.get_transport_class() + available_transports = [ + transports.CommentServiceGrpcTransport, + transports.CommentServiceRestTransport, + ] + assert transport in available_transports + + transport = CommentServiceClient.get_transport_class("grpc") + assert transport == transports.CommentServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CommentServiceClient, transports.CommentServiceGrpcTransport, "grpc"), + ( + CommentServiceAsyncClient, + transports.CommentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CommentServiceClient, transports.CommentServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + CommentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CommentServiceClient), +) +@mock.patch.object( + CommentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CommentServiceAsyncClient), +) +def test_comment_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CommentServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(CommentServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
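+    # Any value other than "true" or "false" should raise ValueError.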
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CommentServiceClient, transports.CommentServiceGrpcTransport, "grpc", "true"), + ( + CommentServiceAsyncClient, + transports.CommentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CommentServiceClient, transports.CommentServiceGrpcTransport, "grpc", "false"), + ( + CommentServiceAsyncClient, + transports.CommentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CommentServiceClient, transports.CommentServiceRestTransport, "rest", "true"), + (CommentServiceClient, transports.CommentServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CommentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CommentServiceClient), +) +@mock.patch.object( + CommentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CommentServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_comment_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
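+    # With "true" the client should switch to the mTLS endpoint and forward
+    # the cert source; with "false" it should do neither.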
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [CommentServiceClient, CommentServiceAsyncClient] +) +@mock.patch.object( + CommentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CommentServiceClient), +) +@mock.patch.object( + CommentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CommentServiceAsyncClient), +) +def test_comment_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
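+    # An explicit api_endpoint in client_options always wins over the
+    # mTLS autoswitch.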
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CommentServiceClient, transports.CommentServiceGrpcTransport, "grpc"), + ( + CommentServiceAsyncClient, + transports.CommentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CommentServiceClient, transports.CommentServiceRestTransport, "rest"), + ], +) +def test_comment_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
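+    # User-supplied scopes should be passed through to the transport verbatim.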
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CommentServiceClient, + transports.CommentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CommentServiceAsyncClient, + transports.CommentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CommentServiceClient, transports.CommentServiceRestTransport, "rest", None), + ], +) +def test_comment_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_comment_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.support_v2.services.comment_service.transports.CommentServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CommentServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CommentServiceClient, + transports.CommentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CommentServiceAsyncClient, + transports.CommentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_comment_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
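+    # create_channel must receive file_creds from the credentials file, not
+    # the ADC credentials that google.auth.default() would return.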
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudsupport.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudsupport.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + comment_service.ListCommentsRequest, + dict, + ], +) +def test_list_comments(request_type, transport: str = "grpc"): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = comment_service.ListCommentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == comment_service.ListCommentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCommentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_comments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + client.list_comments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == comment_service.ListCommentsRequest() + + +@pytest.mark.asyncio +async def test_list_comments_async( + transport: str = "grpc_asyncio", request_type=comment_service.ListCommentsRequest +): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. 
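+        # FakeUnaryUnaryCall wraps the response so the mocked stub is
+        # awaitable, mirroring a real grpc.aio call.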
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + comment_service.ListCommentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == comment_service.ListCommentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCommentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_comments_async_from_dict(): + await test_list_comments_async(request_type=dict) + + +def test_list_comments_field_headers(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = comment_service.ListCommentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + call.return_value = comment_service.ListCommentsResponse() + client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_comments_field_headers_async(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = comment_service.ListCommentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + comment_service.ListCommentsResponse() + ) + await client.list_comments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_comments_flattened(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = comment_service.ListCommentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_comments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
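+    # The flattened ``parent`` keyword should have been copied onto the
+    # request message that reached the stub.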
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_comments_flattened_error(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_comments( + comment_service.ListCommentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_comments_flattened_async(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = comment_service.ListCommentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + comment_service.ListCommentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_comments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_comments_flattened_error_async(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_comments( + comment_service.ListCommentsRequest(), + parent="parent_value", + ) + + +def test_list_comments_pager(transport_name: str = "grpc"): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + comment.Comment(), + ], + next_page_token="abc", + ), + comment_service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + ], + next_page_token="ghi", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_comments(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, comment.Comment) for i in results) + + +def test_list_comments_pages(transport_name: str = "grpc"): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_comments), "__call__") as call: + # Set the response to a series of pages. 
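+        # The final page carries no next_page_token, so iteration stops there;
+        # the trailing RuntimeError only fires if the pager over-fetches.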
+ call.side_effect = ( + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + comment.Comment(), + ], + next_page_token="abc", + ), + comment_service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + ], + next_page_token="ghi", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_comments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_comments_async_pager(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_comments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + comment.Comment(), + ], + next_page_token="abc", + ), + comment_service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + ], + next_page_token="ghi", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_comments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, comment.Comment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_comments_async_pages(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_comments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + comment.Comment(), + ], + next_page_token="abc", + ), + comment_service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + ], + next_page_token="ghi", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_comments(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + comment_service.CreateCommentRequest, + dict, + ], +) +def test_create_comment(request_type, transport: str = "grpc"): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
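+    # Patching ``__call__`` on the bound stub method intercepts the RPC
+    # without ever touching the underlying channel.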
+ with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_comment.Comment( + name="name_value", + body="body_value", + plain_text_body="plain_text_body_value", + ) + response = client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == comment_service.CreateCommentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_comment.Comment) + assert response.name == "name_value" + assert response.body == "body_value" + assert response.plain_text_body == "plain_text_body_value" + + +def test_create_comment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + client.create_comment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == comment_service.CreateCommentRequest() + + +@pytest.mark.asyncio +async def test_create_comment_async( + transport: str = "grpc_asyncio", request_type=comment_service.CreateCommentRequest +): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcs_comment.Comment( + name="name_value", + body="body_value", + plain_text_body="plain_text_body_value", + ) + ) + response = await client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == comment_service.CreateCommentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_comment.Comment) + assert response.name == "name_value" + assert response.body == "body_value" + assert response.plain_text_body == "plain_text_body_value" + + +@pytest.mark.asyncio +async def test_create_comment_async_from_dict(): + await test_create_comment_async(request_type=dict) + + +def test_create_comment_field_headers(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = comment_service.CreateCommentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + call.return_value = gcs_comment.Comment() + client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. 
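+    # The request must pass through unchanged; the routing header checked
+    # below is derived from ``request.parent``.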
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_comment_field_headers_async(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = comment_service.CreateCommentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_comment.Comment()) + await client.create_comment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_comment_flattened(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_comment.Comment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_comment( + parent="parent_value", + comment=gcs_comment.Comment(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].comment + mock_val = gcs_comment.Comment(name="name_value") + assert arg == mock_val + + +def test_create_comment_flattened_error(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_comment( + comment_service.CreateCommentRequest(), + parent="parent_value", + comment=gcs_comment.Comment(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_comment_flattened_async(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_comment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_comment.Comment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_comment.Comment()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_comment( + parent="parent_value", + comment=gcs_comment.Comment(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
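+    # Both flattened arguments (``parent`` and ``comment``) should appear on
+    # the request message that reached the stub.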
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].comment + mock_val = gcs_comment.Comment(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_comment_flattened_error_async(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_comment( + comment_service.CreateCommentRequest(), + parent="parent_value", + comment=gcs_comment.Comment(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + comment_service.ListCommentsRequest, + dict, + ], +) +def test_list_comments_rest(request_type): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/cases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = comment_service.ListCommentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = comment_service.ListCommentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_comments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCommentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_comments_rest_required_fields( + request_type=comment_service.ListCommentsRequest, +): + transport_class = transports.CommentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_comments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_comments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
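+    # The proto below is serialized to JSON and installed as the body of the
+    # mocked HTTP response.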
+    return_value = comment_service.ListCommentsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = comment_service.ListCommentsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_comments(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_comments_rest_unset_required_fields():
+    transport = transports.CommentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_comments._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_comments_rest_interceptors(null_interceptor):
+    transport = transports.CommentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CommentServiceRestInterceptor(),
+    )
+    client = CommentServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CommentServiceRestInterceptor, "post_list_comments"
+    ) as post, mock.patch.object(
+        transports.CommentServiceRestInterceptor, "pre_list_comments"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = comment_service.ListCommentsRequest.pb(
+            comment_service.ListCommentsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = comment_service.ListCommentsResponse.to_json(
+            comment_service.ListCommentsResponse()
+        )
+
+        request = comment_service.ListCommentsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = comment_service.ListCommentsResponse()
+
+        client.list_comments(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_comments_rest_bad_request(
+    transport: str = "rest", request_type=comment_service.ListCommentsRequest
+):
+    client = CommentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/cases/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_comments(request)
+
+
+def test_list_comments_rest_flattened():
+    client = CommentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = comment_service.ListCommentsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/cases/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = comment_service.ListCommentsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.list_comments(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v2/{parent=projects/*/cases/*}/comments" % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_comments_rest_flattened_error(transport: str = "rest"):
+    client = CommentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_comments(
+            comment_service.ListCommentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_comments_rest_pager(transport: str = "rest"):
+    client = CommentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + comment.Comment(), + ], + next_page_token="abc", + ), + comment_service.ListCommentsResponse( + comments=[], + next_page_token="def", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + ], + next_page_token="ghi", + ), + comment_service.ListCommentsResponse( + comments=[ + comment.Comment(), + comment.Comment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + comment_service.ListCommentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/cases/sample2"} + + pager = client.list_comments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, comment.Comment) for i in results) + + pages = list(client.list_comments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + comment_service.CreateCommentRequest, + dict, + ], +) +def test_create_comment_rest(request_type): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/cases/sample2"} + request_init["comment"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "creator": { + "display_name": "display_name_value", + "email": "email_value", + "google_support": True, + }, + "body": "body_value", + "plain_text_body": "plain_text_body_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_comment.Comment( + name="name_value", + body="body_value", + plain_text_body="plain_text_body_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_comment.Comment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_comment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcs_comment.Comment) + assert response.name == "name_value" + assert response.body == "body_value" + assert response.plain_text_body == "plain_text_body_value" + + +def test_create_comment_rest_required_fields( + request_type=comment_service.CreateCommentRequest, +): + transport_class = transports.CommentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_comment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_comment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcs_comment.Comment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
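+            # (for this POST method a body is still attached to the
+            # transcode result below)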
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gcs_comment.Comment.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_comment(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_comment_rest_unset_required_fields():
+    transport = transports.CommentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_comment._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "comment",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_comment_rest_interceptors(null_interceptor):
+    transport = transports.CommentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CommentServiceRestInterceptor(),
+    )
+    client = CommentServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CommentServiceRestInterceptor, "post_create_comment"
+    ) as post, mock.patch.object(
+        transports.CommentServiceRestInterceptor, "pre_create_comment"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = comment_service.CreateCommentRequest.pb(
+            comment_service.CreateCommentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gcs_comment.Comment.to_json(gcs_comment.Comment())
+
+        request = comment_service.CreateCommentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gcs_comment.Comment()
+
+        client.create_comment(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_comment_rest_bad_request(
+    transport: str = "rest", request_type=comment_service.CreateCommentRequest
+):
+    client = CommentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/cases/sample2"}
+    request_init["comment"] = {
+        "name": "name_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "creator": {
+            "display_name": "display_name_value",
+            "email": "email_value",
+            "google_support": True,
+        },
+        "body": "body_value",
+        "plain_text_body": "plain_text_body_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
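+    # A 400 status from the mocked session must surface to the caller as
+    # google.api_core.exceptions.BadRequest.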
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_comment(request) + + +def test_create_comment_rest_flattened(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_comment.Comment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/cases/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + comment=gcs_comment.Comment(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_comment.Comment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_comment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/cases/*}/comments" % client.transport._host, + args[1], + ) + + +def test_create_comment_rest_flattened_error(transport: str = "rest"): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_comment( + comment_service.CreateCommentRequest(), + parent="parent_value", + comment=gcs_comment.Comment(name="name_value"), + ) + + +def test_create_comment_rest_error(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CommentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CommentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CommentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CommentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CommentServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
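+    # A mock.Mock stands in for ClientOptions here; the client only needs its
+    # ``api_key`` attribute to trigger the conflict.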
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CommentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CommentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CommentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CommentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CommentServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CommentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CommentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CommentServiceGrpcTransport, + transports.CommentServiceGrpcAsyncIOTransport, + transports.CommentServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CommentServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CommentServiceGrpcTransport, + ) + + +def test_comment_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CommentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_comment_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.support_v2.services.comment_service.transports.CommentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CommentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
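+    # The base transport only declares the RPC surface; each concrete
+    # transport is responsible for overriding these.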
+ methods = ( + "list_comments", + "create_comment", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_comment_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.support_v2.services.comment_service.transports.CommentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CommentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_comment_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.support_v2.services.comment_service.transports.CommentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CommentServiceTransport() + adc.assert_called_once() + + +def test_comment_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CommentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CommentServiceGrpcTransport, + transports.CommentServiceGrpcAsyncIOTransport, + ], +) +def test_comment_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
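+    # User-supplied scopes are forwarded as ``scopes`` while the library
+    # default remains in ``default_scopes``.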
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CommentServiceGrpcTransport, + transports.CommentServiceGrpcAsyncIOTransport, + transports.CommentServiceRestTransport, + ], +) +def test_comment_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CommentServiceGrpcTransport, grpc_helpers), + (transports.CommentServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_comment_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudsupport.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudsupport.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CommentServiceGrpcTransport, + transports.CommentServiceGrpcAsyncIOTransport, + ], +) +def test_comment_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
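+    # grpc.ssl_channel_credentials should then be built from the certificate
+    # and key produced by the callback.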
+    with mock.patch.object(
+        transport_class, "create_channel", return_value=mock.Mock()
+    ):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_comment_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.CommentServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_comment_service_host_no_port(transport_name):
+    client = CommentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudsupport.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudsupport.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudsupport.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_comment_service_host_with_port(transport_name):
+    client = CommentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudsupport.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudsupport.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudsupport.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_comment_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = CommentServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = CommentServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_comments._session
+    session2 = client2.transport.list_comments._session
+    assert session1 != session2
+    session1 = client1.transport.create_comment._session
+    session2 = client2.transport.create_comment._session
+    assert session1 != session2
+
+
+def test_comment_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CommentServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_comment_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CommentServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.CommentServiceGrpcTransport,
+        transports.CommentServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_comment_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.CommentServiceGrpcTransport, + transports.CommentServiceGrpcAsyncIOTransport, + ], +) +def test_comment_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_case_path(): + organization = "squid" + case = "clam" + expected = "organizations/{organization}/cases/{case}".format( + organization=organization, + case=case, + ) + actual = CommentServiceClient.case_path(organization, case) + assert expected == actual + + +def test_parse_case_path(): + expected = { + "organization": "whelk", + "case": "octopus", + } + path = CommentServiceClient.case_path(**expected) + + # Check that the path construction is reversible. + actual = CommentServiceClient.parse_case_path(path) + assert expected == actual + + +def test_comment_path(): + organization = "oyster" + case = "nudibranch" + comment = "cuttlefish" + expected = "organizations/{organization}/cases/{case}/comments/{comment}".format( + organization=organization, + case=case, + comment=comment, + ) + actual = CommentServiceClient.comment_path(organization, case, comment) + assert expected == actual + + +def test_parse_comment_path(): + expected = { + "organization": "mussel", + "case": "winkle", + "comment": "nautilus", + } + path = CommentServiceClient.comment_path(**expected) + + # Check that the path construction is reversible. + actual = CommentServiceClient.parse_comment_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CommentServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = CommentServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CommentServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CommentServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = CommentServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CommentServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CommentServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = CommentServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CommentServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = CommentServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = CommentServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CommentServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CommentServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = CommentServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CommentServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CommentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CommentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CommentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CommentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CommentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
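+    # The transport must be closed on context exit, and not before.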
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CommentServiceClient, transports.CommentServiceGrpcTransport), + (CommentServiceAsyncClient, transports.CommentServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-vmwareengine/.coveragerc b/packages/google-cloud-vmwareengine/.coveragerc index 0f10915e5f7d..d1696cad7510 100644 --- a/packages/google-cloud-vmwareengine/.coveragerc +++ b/packages/google-cloud-vmwareengine/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/vmwareengine/__init__.py + google/cloud/vmwareengine/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-vmwareengine/CHANGELOG.md b/packages/google-cloud-vmwareengine/CHANGELOG.md index a85d5fc375ca..174bb2462de8 100644 --- a/packages/google-cloud-vmwareengine/CHANGELOG.md +++ b/packages/google-cloud-vmwareengine/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [1.0.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-vmwareengine-v1.0.0...google-cloud-vmwareengine-v1.0.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## [1.0.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-vmwareengine-v0.2.2...google-cloud-vmwareengine-v1.0.0) (2023-01-31) + + +### ⚠ BREAKING CHANGES + +* resource proto messages moved to new file ([#10829](https://github.com/googleapis/google-cloud-python/issues/10829)) + +### Bug Fixes + +* resource proto messages moved to new file ([#10829](https://github.com/googleapis/google-cloud-python/issues/10829)) ([bf1ef3d](https://github.com/googleapis/google-cloud-python/commit/bf1ef3d2db8f8cd2e88d4ab29bff73a1af3ae99c)) + ## [0.2.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-vmwareengine-v0.2.1...google-cloud-vmwareengine-v0.2.2) (2023-01-20) diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py index 228703a57306..9bea4b6fbede 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py @@ -25,13 +25,11 @@ VmwareEngineClient, ) from google.cloud.vmwareengine_v1.types.vmwareengine import ( - Cluster, CreateClusterRequest, CreateHcxActivationKeyRequest, 
CreateNetworkPolicyRequest, CreatePrivateCloudRequest, CreateVmwareEngineNetworkRequest, - Credentials, DeleteClusterRequest, DeleteNetworkPolicyRequest, DeletePrivateCloudRequest, @@ -42,8 +40,6 @@ GetNodeTypeRequest, GetPrivateCloudRequest, GetVmwareEngineNetworkRequest, - Hcx, - HcxActivationKey, ListClustersRequest, ListClustersResponse, ListHcxActivationKeysRequest, @@ -58,23 +54,29 @@ ListSubnetsResponse, ListVmwareEngineNetworksRequest, ListVmwareEngineNetworksResponse, - NetworkConfig, - NetworkPolicy, - NodeType, - NodeTypeConfig, - Nsx, OperationMetadata, - PrivateCloud, ResetNsxCredentialsRequest, ResetVcenterCredentialsRequest, ShowNsxCredentialsRequest, ShowVcenterCredentialsRequest, - Subnet, UndeletePrivateCloudRequest, UpdateClusterRequest, UpdateNetworkPolicyRequest, UpdatePrivateCloudRequest, UpdateVmwareEngineNetworkRequest, +) +from google.cloud.vmwareengine_v1.types.vmwareengine_resources import ( + Cluster, + Credentials, + Hcx, + HcxActivationKey, + NetworkConfig, + NetworkPolicy, + NodeType, + NodeTypeConfig, + Nsx, + PrivateCloud, + Subnet, Vcenter, VmwareEngineNetwork, ) @@ -82,13 +84,11 @@ __all__ = ( "VmwareEngineClient", "VmwareEngineAsyncClient", - "Cluster", "CreateClusterRequest", "CreateHcxActivationKeyRequest", "CreateNetworkPolicyRequest", "CreatePrivateCloudRequest", "CreateVmwareEngineNetworkRequest", - "Credentials", "DeleteClusterRequest", "DeleteNetworkPolicyRequest", "DeletePrivateCloudRequest", @@ -99,8 +99,6 @@ "GetNodeTypeRequest", "GetPrivateCloudRequest", "GetVmwareEngineNetworkRequest", - "Hcx", - "HcxActivationKey", "ListClustersRequest", "ListClustersResponse", "ListHcxActivationKeysRequest", @@ -115,23 +113,27 @@ "ListSubnetsResponse", "ListVmwareEngineNetworksRequest", "ListVmwareEngineNetworksResponse", - "NetworkConfig", - "NetworkPolicy", - "NodeType", - "NodeTypeConfig", - "Nsx", "OperationMetadata", - "PrivateCloud", "ResetNsxCredentialsRequest", "ResetVcenterCredentialsRequest", "ShowNsxCredentialsRequest", "ShowVcenterCredentialsRequest", - "Subnet", "UndeletePrivateCloudRequest", "UpdateClusterRequest", "UpdateNetworkPolicyRequest", "UpdatePrivateCloudRequest", "UpdateVmwareEngineNetworkRequest", + "Cluster", + "Credentials", + "Hcx", + "HcxActivationKey", + "NetworkConfig", + "NetworkPolicy", + "NodeType", + "NodeTypeConfig", + "Nsx", + "PrivateCloud", + "Subnet", "Vcenter", "VmwareEngineNetwork", ) diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py index 0d82451c5d9f..1eaab6f24258 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.2.2" # {x-release-please-version} +__version__ = "1.0.1" # {x-release-please-version} diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py index d2cd0211d47e..2fcb992f4b3d 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py @@ -13,20 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.vmwareengine import gapic_version as package_version +from google.cloud.vmwareengine_v1 import gapic_version as package_version __version__ = package_version.__version__ from .services.vmware_engine import VmwareEngineAsyncClient, VmwareEngineClient from .types.vmwareengine import ( - Cluster, CreateClusterRequest, CreateHcxActivationKeyRequest, CreateNetworkPolicyRequest, CreatePrivateCloudRequest, CreateVmwareEngineNetworkRequest, - Credentials, DeleteClusterRequest, DeleteNetworkPolicyRequest, DeletePrivateCloudRequest, @@ -37,8 +35,6 @@ GetNodeTypeRequest, GetPrivateCloudRequest, GetVmwareEngineNetworkRequest, - Hcx, - HcxActivationKey, ListClustersRequest, ListClustersResponse, ListHcxActivationKeysRequest, @@ -53,23 +49,29 @@ ListSubnetsResponse, ListVmwareEngineNetworksRequest, ListVmwareEngineNetworksResponse, - NetworkConfig, - NetworkPolicy, - NodeType, - NodeTypeConfig, - Nsx, OperationMetadata, - PrivateCloud, ResetNsxCredentialsRequest, ResetVcenterCredentialsRequest, ShowNsxCredentialsRequest, ShowVcenterCredentialsRequest, - Subnet, UndeletePrivateCloudRequest, UpdateClusterRequest, UpdateNetworkPolicyRequest, UpdatePrivateCloudRequest, UpdateVmwareEngineNetworkRequest, +) +from .types.vmwareengine_resources import ( + Cluster, + Credentials, + Hcx, + HcxActivationKey, + NetworkConfig, + NetworkPolicy, + NodeType, + NodeTypeConfig, + Nsx, + PrivateCloud, + Subnet, Vcenter, VmwareEngineNetwork, ) diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py index aa80f74315ce..1eaab6f24258 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "1.0.1" # {x-release-please-version} diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py index a3cff36ed8ac..0ba29fc10301 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py @@ -53,7 +53,7 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.vmwareengine_v1.services.vmware_engine import pagers -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources from .client import VmwareEngineClient from .transports.base import DEFAULT_CLIENT_INFO, VmwareEngineTransport @@ -378,7 +378,7 @@ async def get_private_cloud( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.PrivateCloud: + ) -> vmwareengine_resources.PrivateCloud: r"""Retrieves a ``PrivateCloud`` resource by its resource name. .. 
code-block:: python @@ -490,7 +490,7 @@ async def create_private_cloud( request: Optional[Union[vmwareengine.CreatePrivateCloudRequest, dict]] = None, *, parent: Optional[str] = None, - private_cloud: Optional[vmwareengine.PrivateCloud] = None, + private_cloud: Optional[vmwareengine_resources.PrivateCloud] = None, private_cloud_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -522,6 +522,7 @@ async def sample_create_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.CreatePrivateCloudRequest( parent="parent_value", @@ -642,7 +643,7 @@ async def sample_create_private_cloud(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -653,7 +654,7 @@ async def update_private_cloud( self, request: Optional[Union[vmwareengine.UpdatePrivateCloudRequest, dict]] = None, *, - private_cloud: Optional[vmwareengine.PrivateCloud] = None, + private_cloud: Optional[vmwareengine_resources.PrivateCloud] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -686,6 +687,7 @@ async def sample_update_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.UpdatePrivateCloudRequest( private_cloud=private_cloud, @@ -785,7 +787,7 @@ async def sample_update_private_cloud(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -924,7 +926,7 @@ async def sample_delete_private_cloud(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -1050,7 +1052,7 @@ async def sample_undelete_private_cloud(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -1193,7 +1195,7 @@ async def get_cluster( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Cluster: + ) -> vmwareengine_resources.Cluster: r"""Retrieves a ``Cluster`` resource by its resource name. .. 
code-block:: python @@ -1303,7 +1305,7 @@ async def create_cluster( request: Optional[Union[vmwareengine.CreateClusterRequest, dict]] = None, *, parent: Optional[str] = None, - cluster: Optional[vmwareengine.Cluster] = None, + cluster: Optional[vmwareengine_resources.Cluster] = None, cluster_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1448,7 +1450,7 @@ async def sample_create_cluster(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.Cluster, + vmwareengine_resources.Cluster, metadata_type=vmwareengine.OperationMetadata, ) @@ -1459,7 +1461,7 @@ async def update_cluster( self, request: Optional[Union[vmwareengine.UpdateClusterRequest, dict]] = None, *, - cluster: Optional[vmwareengine.Cluster] = None, + cluster: Optional[vmwareengine_resources.Cluster] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1587,7 +1589,7 @@ async def sample_update_cluster(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.Cluster, + vmwareengine_resources.Cluster, metadata_type=vmwareengine.OperationMetadata, ) @@ -1988,7 +1990,7 @@ async def get_node_type( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.NodeType: + ) -> vmwareengine_resources.NodeType: r"""Gets details of a single ``NodeType``. .. code-block:: python @@ -2101,7 +2103,7 @@ async def show_nsx_credentials( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Credentials: + ) -> vmwareengine_resources.Credentials: r"""Gets details of credentials for NSX appliance. .. code-block:: python @@ -2218,7 +2220,7 @@ async def show_vcenter_credentials( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Credentials: + ) -> vmwareengine_resources.Credentials: r"""Gets details of credentials for Vcenter appliance. .. 
code-block:: python @@ -2442,7 +2444,7 @@ async def sample_reset_nsx_credentials(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -2568,7 +2570,7 @@ async def sample_reset_vcenter_credentials(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -2582,7 +2584,7 @@ async def create_hcx_activation_key( ] = None, *, parent: Optional[str] = None, - hcx_activation_key: Optional[vmwareengine.HcxActivationKey] = None, + hcx_activation_key: Optional[vmwareengine_resources.HcxActivationKey] = None, hcx_activation_key_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -2732,7 +2734,7 @@ async def sample_create_hcx_activation_key(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.HcxActivationKey, + vmwareengine_resources.HcxActivationKey, metadata_type=vmwareengine.OperationMetadata, ) @@ -2877,7 +2879,7 @@ async def get_hcx_activation_key( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.HcxActivationKey: + ) -> vmwareengine_resources.HcxActivationKey: r"""Retrieves a ``HcxActivationKey`` resource by its resource name. .. code-block:: python @@ -2998,7 +3000,7 @@ async def get_network_policy( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.NetworkPolicy: + ) -> vmwareengine_resources.NetworkPolicy: r"""Retrieves a ``NetworkPolicy`` resource by its resource name. .. 
code-block:: python @@ -3247,7 +3249,7 @@ async def create_network_policy( request: Optional[Union[vmwareengine.CreateNetworkPolicyRequest, dict]] = None, *, parent: Optional[str] = None, - network_policy: Optional[vmwareengine.NetworkPolicy] = None, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, network_policy_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -3401,7 +3403,7 @@ async def sample_create_network_policy(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.NetworkPolicy, + vmwareengine_resources.NetworkPolicy, metadata_type=vmwareengine.OperationMetadata, ) @@ -3412,7 +3414,7 @@ async def update_network_policy( self, request: Optional[Union[vmwareengine.UpdateNetworkPolicyRequest, dict]] = None, *, - network_policy: Optional[vmwareengine.NetworkPolicy] = None, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -3553,7 +3555,7 @@ async def sample_update_network_policy(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.NetworkPolicy, + vmwareengine_resources.NetworkPolicy, metadata_type=vmwareengine.OperationMetadata, ) @@ -3697,7 +3699,9 @@ async def create_vmware_engine_network( ] = None, *, parent: Optional[str] = None, - vmware_engine_network: Optional[vmwareengine.VmwareEngineNetwork] = None, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, vmware_engine_network_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -3851,7 +3855,7 @@ async def sample_create_vmware_engine_network(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.VmwareEngineNetwork, + vmwareengine_resources.VmwareEngineNetwork, metadata_type=vmwareengine.OperationMetadata, ) @@ -3864,7 +3868,9 @@ async def update_vmware_engine_network( Union[vmwareengine.UpdateVmwareEngineNetworkRequest, dict] ] = None, *, - vmware_engine_network: Optional[vmwareengine.VmwareEngineNetwork] = None, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -3992,7 +3998,7 @@ async def sample_update_vmware_engine_network(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine.VmwareEngineNetwork, + vmwareengine_resources.VmwareEngineNetwork, metadata_type=vmwareengine.OperationMetadata, ) @@ -4142,7 +4148,7 @@ async def get_vmware_engine_network( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.VmwareEngineNetwork: + ) -> vmwareengine_resources.VmwareEngineNetwork: r"""Retrieves a ``VmwareEngineNetwork`` resource by its resource name. 
The resource contains details of the VMware Engine network, such as its VMware Engine network type, peered networks diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py index c83ce5670251..a761bc3e29ff 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py @@ -57,7 +57,7 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.vmwareengine_v1.services.vmware_engine import pagers -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources from .transports.base import DEFAULT_CLIENT_INFO, VmwareEngineTransport from .transports.grpc import VmwareEngineGrpcTransport @@ -735,7 +735,7 @@ def get_private_cloud( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.PrivateCloud: + ) -> vmwareengine_resources.PrivateCloud: r"""Retrieves a ``PrivateCloud`` resource by its resource name. .. code-block:: python @@ -838,7 +838,7 @@ def create_private_cloud( request: Optional[Union[vmwareengine.CreatePrivateCloudRequest, dict]] = None, *, parent: Optional[str] = None, - private_cloud: Optional[vmwareengine.PrivateCloud] = None, + private_cloud: Optional[vmwareengine_resources.PrivateCloud] = None, private_cloud_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -870,6 +870,7 @@ def sample_create_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.CreatePrivateCloudRequest( parent="parent_value", @@ -990,7 +991,7 @@ def sample_create_private_cloud(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -1001,7 +1002,7 @@ def update_private_cloud( self, request: Optional[Union[vmwareengine.UpdatePrivateCloudRequest, dict]] = None, *, - private_cloud: Optional[vmwareengine.PrivateCloud] = None, + private_cloud: Optional[vmwareengine_resources.PrivateCloud] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1034,6 +1035,7 @@ def sample_update_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.UpdatePrivateCloudRequest( private_cloud=private_cloud, @@ -1133,7 +1135,7 @@ def sample_update_private_cloud(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -1272,7 +1274,7 @@ def sample_delete_private_cloud(): response = operation.from_gapic( response, self._transport.operations_client, - 
vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -1398,7 +1400,7 @@ def sample_undelete_private_cloud(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -1532,7 +1534,7 @@ def get_cluster( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Cluster: + ) -> vmwareengine_resources.Cluster: r"""Retrieves a ``Cluster`` resource by its resource name. .. code-block:: python @@ -1633,7 +1635,7 @@ def create_cluster( request: Optional[Union[vmwareengine.CreateClusterRequest, dict]] = None, *, parent: Optional[str] = None, - cluster: Optional[vmwareengine.Cluster] = None, + cluster: Optional[vmwareengine_resources.Cluster] = None, cluster_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1778,7 +1780,7 @@ def sample_create_cluster(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.Cluster, + vmwareengine_resources.Cluster, metadata_type=vmwareengine.OperationMetadata, ) @@ -1789,7 +1791,7 @@ def update_cluster( self, request: Optional[Union[vmwareengine.UpdateClusterRequest, dict]] = None, *, - cluster: Optional[vmwareengine.Cluster] = None, + cluster: Optional[vmwareengine_resources.Cluster] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1917,7 +1919,7 @@ def sample_update_cluster(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.Cluster, + vmwareengine_resources.Cluster, metadata_type=vmwareengine.OperationMetadata, ) @@ -2300,7 +2302,7 @@ def get_node_type( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.NodeType: + ) -> vmwareengine_resources.NodeType: r"""Gets details of a single ``NodeType``. .. code-block:: python @@ -2404,7 +2406,7 @@ def show_nsx_credentials( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Credentials: + ) -> vmwareengine_resources.Credentials: r"""Gets details of credentials for NSX appliance. .. code-block:: python @@ -2512,7 +2514,7 @@ def show_vcenter_credentials( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Credentials: + ) -> vmwareengine_resources.Credentials: r"""Gets details of credentials for Vcenter appliance. .. 
code-block:: python @@ -2727,7 +2729,7 @@ def sample_reset_nsx_credentials(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -2855,7 +2857,7 @@ def sample_reset_vcenter_credentials(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.PrivateCloud, + vmwareengine_resources.PrivateCloud, metadata_type=vmwareengine.OperationMetadata, ) @@ -2869,7 +2871,7 @@ def create_hcx_activation_key( ] = None, *, parent: Optional[str] = None, - hcx_activation_key: Optional[vmwareengine.HcxActivationKey] = None, + hcx_activation_key: Optional[vmwareengine_resources.HcxActivationKey] = None, hcx_activation_key_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -3021,7 +3023,7 @@ def sample_create_hcx_activation_key(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.HcxActivationKey, + vmwareengine_resources.HcxActivationKey, metadata_type=vmwareengine.OperationMetadata, ) @@ -3157,7 +3159,7 @@ def get_hcx_activation_key( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.HcxActivationKey: + ) -> vmwareengine_resources.HcxActivationKey: r"""Retrieves a ``HcxActivationKey`` resource by its resource name. .. code-block:: python @@ -3269,7 +3271,7 @@ def get_network_policy( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.NetworkPolicy: + ) -> vmwareengine_resources.NetworkPolicy: r"""Retrieves a ``NetworkPolicy`` resource by its resource name. .. 
code-block:: python @@ -3500,7 +3502,7 @@ def create_network_policy( request: Optional[Union[vmwareengine.CreateNetworkPolicyRequest, dict]] = None, *, parent: Optional[str] = None, - network_policy: Optional[vmwareengine.NetworkPolicy] = None, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, network_policy_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -3654,7 +3656,7 @@ def sample_create_network_policy(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.NetworkPolicy, + vmwareengine_resources.NetworkPolicy, metadata_type=vmwareengine.OperationMetadata, ) @@ -3665,7 +3667,7 @@ def update_network_policy( self, request: Optional[Union[vmwareengine.UpdateNetworkPolicyRequest, dict]] = None, *, - network_policy: Optional[vmwareengine.NetworkPolicy] = None, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -3806,7 +3808,7 @@ def sample_update_network_policy(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.NetworkPolicy, + vmwareengine_resources.NetworkPolicy, metadata_type=vmwareengine.OperationMetadata, ) @@ -3950,7 +3952,9 @@ def create_vmware_engine_network( ] = None, *, parent: Optional[str] = None, - vmware_engine_network: Optional[vmwareengine.VmwareEngineNetwork] = None, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, vmware_engine_network_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -4106,7 +4110,7 @@ def sample_create_vmware_engine_network(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.VmwareEngineNetwork, + vmwareengine_resources.VmwareEngineNetwork, metadata_type=vmwareengine.OperationMetadata, ) @@ -4119,7 +4123,9 @@ def update_vmware_engine_network( Union[vmwareengine.UpdateVmwareEngineNetworkRequest, dict] ] = None, *, - vmware_engine_network: Optional[vmwareengine.VmwareEngineNetwork] = None, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -4249,7 +4255,7 @@ def sample_update_vmware_engine_network(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine.VmwareEngineNetwork, + vmwareengine_resources.VmwareEngineNetwork, metadata_type=vmwareengine.OperationMetadata, ) @@ -4401,7 +4407,7 @@ def get_vmware_engine_network( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.VmwareEngineNetwork: + ) -> vmwareengine_resources.VmwareEngineNetwork: r"""Retrieves a ``VmwareEngineNetwork`` resource by its resource name. 
The resource contains details of the VMware Engine network, such as its VMware Engine network type, peered networks diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py index 4f7c73f84d8a..769f91d2b9a1 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py @@ -24,7 +24,7 @@ Tuple, ) -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources class ListPrivateCloudsPager: @@ -81,7 +81,7 @@ def pages(self) -> Iterator[vmwareengine.ListPrivateCloudsResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine.PrivateCloud]: + def __iter__(self) -> Iterator[vmwareengine_resources.PrivateCloud]: for page in self.pages: yield from page.private_clouds @@ -143,7 +143,7 @@ async def pages(self) -> AsyncIterator[vmwareengine.ListPrivateCloudsResponse]: self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine.PrivateCloud]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.PrivateCloud]: async def async_generator(): async for page in self.pages: for response in page.private_clouds: @@ -209,7 +209,7 @@ def pages(self) -> Iterator[vmwareengine.ListClustersResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine.Cluster]: + def __iter__(self) -> Iterator[vmwareengine_resources.Cluster]: for page in self.pages: yield from page.clusters @@ -271,7 +271,7 @@ async def pages(self) -> AsyncIterator[vmwareengine.ListClustersResponse]: self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine.Cluster]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.Cluster]: async def async_generator(): async for page in self.pages: for response in page.clusters: @@ -337,7 +337,7 @@ def pages(self) -> Iterator[vmwareengine.ListSubnetsResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine.Subnet]: + def __iter__(self) -> Iterator[vmwareengine_resources.Subnet]: for page in self.pages: yield from page.subnets @@ -399,7 +399,7 @@ async def pages(self) -> AsyncIterator[vmwareengine.ListSubnetsResponse]: self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine.Subnet]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.Subnet]: async def async_generator(): async for page in self.pages: for response in page.subnets: @@ -465,7 +465,7 @@ def pages(self) -> Iterator[vmwareengine.ListNodeTypesResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine.NodeType]: + def __iter__(self) -> Iterator[vmwareengine_resources.NodeType]: for page in self.pages: yield from page.node_types @@ -527,7 +527,7 @@ async def pages(self) -> AsyncIterator[vmwareengine.ListNodeTypesResponse]: self._response 
= await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine.NodeType]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.NodeType]: async def async_generator(): async for page in self.pages: for response in page.node_types: @@ -593,7 +593,7 @@ def pages(self) -> Iterator[vmwareengine.ListHcxActivationKeysResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine.HcxActivationKey]: + def __iter__(self) -> Iterator[vmwareengine_resources.HcxActivationKey]: for page in self.pages: yield from page.hcx_activation_keys @@ -655,7 +655,7 @@ async def pages(self) -> AsyncIterator[vmwareengine.ListHcxActivationKeysRespons self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine.HcxActivationKey]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.HcxActivationKey]: async def async_generator(): async for page in self.pages: for response in page.hcx_activation_keys: @@ -721,7 +721,7 @@ def pages(self) -> Iterator[vmwareengine.ListNetworkPoliciesResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine.NetworkPolicy]: + def __iter__(self) -> Iterator[vmwareengine_resources.NetworkPolicy]: for page in self.pages: yield from page.network_policies @@ -783,7 +783,7 @@ async def pages(self) -> AsyncIterator[vmwareengine.ListNetworkPoliciesResponse] self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine.NetworkPolicy]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.NetworkPolicy]: async def async_generator(): async for page in self.pages: for response in page.network_policies: @@ -849,7 +849,7 @@ def pages(self) -> Iterator[vmwareengine.ListVmwareEngineNetworksResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine.VmwareEngineNetwork]: + def __iter__(self) -> Iterator[vmwareengine_resources.VmwareEngineNetwork]: for page in self.pages: yield from page.vmware_engine_networks @@ -913,7 +913,7 @@ async def pages( self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine.VmwareEngineNetwork]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.VmwareEngineNetwork]: async def async_generator(): async for page in self.pages: for response in page.vmware_engine_networks: diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py index 99a1cb32ac98..0092558577d5 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py @@ -29,7 +29,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.vmwareengine_v1 import gapic_version as package_version -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources 
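The change above to `transports/base.py` is the same mechanical edit applied throughout this refactor: resource messages (`PrivateCloud`, `Cluster`, `NodeType`, `Credentials`, and so on) now come from `vmwareengine_resources`, while request/response messages stay in `vmwareengine`. For downstream code, only imports from the versioned `types.vmwareengine` module break; the top-level package keeps re-exporting every name. A minimal migration sketch (the alias is illustrative, not part of the library):

```python
# Still works: the top-level namespace re-exports request and resource messages alike.
from google.cloud.vmwareengine import CreatePrivateCloudRequest, PrivateCloud

# Request messages keep their original versioned location.
from google.cloud.vmwareengine_v1.types.vmwareengine import GetPrivateCloudRequest

# Resource messages moved; the old import
#   from google.cloud.vmwareengine_v1.types.vmwareengine import PrivateCloud
# should now fail, per the 1.0.0 breaking change noted in the CHANGELOG above.
from google.cloud.vmwareengine_v1.types.vmwareengine_resources import (
    PrivateCloud as PrivateCloudResource,  # aliased only to avoid shadowing the import above
)
```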
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -449,7 +449,10 @@ def get_private_cloud( self, ) -> Callable[ [vmwareengine.GetPrivateCloudRequest], - Union[vmwareengine.PrivateCloud, Awaitable[vmwareengine.PrivateCloud]], + Union[ + vmwareengine_resources.PrivateCloud, + Awaitable[vmwareengine_resources.PrivateCloud], + ], ]: raise NotImplementedError() @@ -506,7 +509,9 @@ def get_cluster( self, ) -> Callable[ [vmwareengine.GetClusterRequest], - Union[vmwareengine.Cluster, Awaitable[vmwareengine.Cluster]], + Union[ + vmwareengine_resources.Cluster, Awaitable[vmwareengine_resources.Cluster] + ], ]: raise NotImplementedError() @@ -566,7 +571,9 @@ def get_node_type( self, ) -> Callable[ [vmwareengine.GetNodeTypeRequest], - Union[vmwareengine.NodeType, Awaitable[vmwareengine.NodeType]], + Union[ + vmwareengine_resources.NodeType, Awaitable[vmwareengine_resources.NodeType] + ], ]: raise NotImplementedError() @@ -575,7 +582,10 @@ def show_nsx_credentials( self, ) -> Callable[ [vmwareengine.ShowNsxCredentialsRequest], - Union[vmwareengine.Credentials, Awaitable[vmwareengine.Credentials]], + Union[ + vmwareengine_resources.Credentials, + Awaitable[vmwareengine_resources.Credentials], + ], ]: raise NotImplementedError() @@ -584,7 +594,10 @@ def show_vcenter_credentials( self, ) -> Callable[ [vmwareengine.ShowVcenterCredentialsRequest], - Union[vmwareengine.Credentials, Awaitable[vmwareengine.Credentials]], + Union[ + vmwareengine_resources.Credentials, + Awaitable[vmwareengine_resources.Credentials], + ], ]: raise NotImplementedError() @@ -632,7 +645,10 @@ def get_hcx_activation_key( self, ) -> Callable[ [vmwareengine.GetHcxActivationKeyRequest], - Union[vmwareengine.HcxActivationKey, Awaitable[vmwareengine.HcxActivationKey]], + Union[ + vmwareengine_resources.HcxActivationKey, + Awaitable[vmwareengine_resources.HcxActivationKey], + ], ]: raise NotImplementedError() @@ -641,7 +657,10 @@ def get_network_policy( self, ) -> Callable[ [vmwareengine.GetNetworkPolicyRequest], - Union[vmwareengine.NetworkPolicy, Awaitable[vmwareengine.NetworkPolicy]], + Union[ + vmwareengine_resources.NetworkPolicy, + Awaitable[vmwareengine_resources.NetworkPolicy], + ], ]: raise NotImplementedError() @@ -717,8 +736,8 @@ def get_vmware_engine_network( ) -> Callable[ [vmwareengine.GetVmwareEngineNetworkRequest], Union[ - vmwareengine.VmwareEngineNetwork, - Awaitable[vmwareengine.VmwareEngineNetwork], + vmwareengine_resources.VmwareEngineNetwork, + Awaitable[vmwareengine_resources.VmwareEngineNetwork], ], ]: raise NotImplementedError() diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py index 3b8606a485ec..3e8e4cc1fc4d 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py @@ -26,7 +26,7 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources from .base import DEFAULT_CLIENT_INFO, VmwareEngineTransport @@ -279,7 +279,9 @@ def list_private_clouds( @property def get_private_cloud( self, - ) -> Callable[[vmwareengine.GetPrivateCloudRequest], 
vmwareengine.PrivateCloud]: + ) -> Callable[ + [vmwareengine.GetPrivateCloudRequest], vmwareengine_resources.PrivateCloud + ]: r"""Return a callable for the get private cloud method over gRPC. Retrieves a ``PrivateCloud`` resource by its resource name. @@ -298,7 +300,7 @@ def get_private_cloud( self._stubs["get_private_cloud"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetPrivateCloud", request_serializer=vmwareengine.GetPrivateCloudRequest.serialize, - response_deserializer=vmwareengine.PrivateCloud.deserialize, + response_deserializer=vmwareengine_resources.PrivateCloud.deserialize, ) return self._stubs["get_private_cloud"] @@ -471,7 +473,7 @@ def list_clusters( @property def get_cluster( self, - ) -> Callable[[vmwareengine.GetClusterRequest], vmwareengine.Cluster]: + ) -> Callable[[vmwareengine.GetClusterRequest], vmwareengine_resources.Cluster]: r"""Return a callable for the get cluster method over gRPC. Retrieves a ``Cluster`` resource by its resource name. @@ -490,7 +492,7 @@ def get_cluster( self._stubs["get_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetCluster", request_serializer=vmwareengine.GetClusterRequest.serialize, - response_deserializer=vmwareengine.Cluster.deserialize, + response_deserializer=vmwareengine_resources.Cluster.deserialize, ) return self._stubs["get_cluster"] @@ -642,7 +644,7 @@ def list_node_types( @property def get_node_type( self, - ) -> Callable[[vmwareengine.GetNodeTypeRequest], vmwareengine.NodeType]: + ) -> Callable[[vmwareengine.GetNodeTypeRequest], vmwareengine_resources.NodeType]: r"""Return a callable for the get node type method over gRPC. Gets details of a single ``NodeType``. @@ -661,14 +663,16 @@ def get_node_type( self._stubs["get_node_type"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetNodeType", request_serializer=vmwareengine.GetNodeTypeRequest.serialize, - response_deserializer=vmwareengine.NodeType.deserialize, + response_deserializer=vmwareengine_resources.NodeType.deserialize, ) return self._stubs["get_node_type"] @property def show_nsx_credentials( self, - ) -> Callable[[vmwareengine.ShowNsxCredentialsRequest], vmwareengine.Credentials]: + ) -> Callable[ + [vmwareengine.ShowNsxCredentialsRequest], vmwareengine_resources.Credentials + ]: r"""Return a callable for the show nsx credentials method over gRPC. Gets details of credentials for NSX appliance. @@ -687,7 +691,7 @@ def show_nsx_credentials( self._stubs["show_nsx_credentials"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/ShowNsxCredentials", request_serializer=vmwareengine.ShowNsxCredentialsRequest.serialize, - response_deserializer=vmwareengine.Credentials.deserialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, ) return self._stubs["show_nsx_credentials"] @@ -695,7 +699,7 @@ def show_nsx_credentials( def show_vcenter_credentials( self, ) -> Callable[ - [vmwareengine.ShowVcenterCredentialsRequest], vmwareengine.Credentials + [vmwareengine.ShowVcenterCredentialsRequest], vmwareengine_resources.Credentials ]: r"""Return a callable for the show vcenter credentials method over gRPC. 
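Each transport property touched in this file follows the same generated pattern: build the gRPC stub on first access, memoize it in `self._stubs`, and return the cached callable afterwards; only the deserializer types change in this diff. A stripped-down sketch of that pattern (this class and its wiring are stand-ins, not the library's actual types):

```python
from typing import Any, Callable, Dict


class LazyStubTransport:
    """Stand-in illustrating the stub-memoization pattern in the generated transports."""

    def __init__(self, channel: Any) -> None:
        self.grpc_channel = channel
        self._stubs: Dict[str, Callable] = {}

    @property
    def get_private_cloud(self) -> Callable:
        # Created once on first access, then served from the cache; the real
        # transport also wires request_serializer/response_deserializer here.
        if "get_private_cloud" not in self._stubs:
            self._stubs["get_private_cloud"] = self.grpc_channel.unary_unary(
                "/google.cloud.vmwareengine.v1.VmwareEngine/GetPrivateCloud",
            )
        return self._stubs["get_private_cloud"]
```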
@@ -715,7 +719,7 @@ def show_vcenter_credentials( self._stubs["show_vcenter_credentials"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/ShowVcenterCredentials", request_serializer=vmwareengine.ShowVcenterCredentialsRequest.serialize, - response_deserializer=vmwareengine.Credentials.deserialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, ) return self._stubs["show_vcenter_credentials"] @@ -835,7 +839,8 @@ def list_hcx_activation_keys( def get_hcx_activation_key( self, ) -> Callable[ - [vmwareengine.GetHcxActivationKeyRequest], vmwareengine.HcxActivationKey + [vmwareengine.GetHcxActivationKeyRequest], + vmwareengine_resources.HcxActivationKey, ]: r"""Return a callable for the get hcx activation key method over gRPC. @@ -855,14 +860,16 @@ def get_hcx_activation_key( self._stubs["get_hcx_activation_key"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetHcxActivationKey", request_serializer=vmwareengine.GetHcxActivationKeyRequest.serialize, - response_deserializer=vmwareengine.HcxActivationKey.deserialize, + response_deserializer=vmwareengine_resources.HcxActivationKey.deserialize, ) return self._stubs["get_hcx_activation_key"] @property def get_network_policy( self, - ) -> Callable[[vmwareengine.GetNetworkPolicyRequest], vmwareengine.NetworkPolicy]: + ) -> Callable[ + [vmwareengine.GetNetworkPolicyRequest], vmwareengine_resources.NetworkPolicy + ]: r"""Return a callable for the get network policy method over gRPC. Retrieves a ``NetworkPolicy`` resource by its resource name. @@ -881,7 +888,7 @@ def get_network_policy( self._stubs["get_network_policy"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetNetworkPolicy", request_serializer=vmwareengine.GetNetworkPolicyRequest.serialize, - response_deserializer=vmwareengine.NetworkPolicy.deserialize, + response_deserializer=vmwareengine_resources.NetworkPolicy.deserialize, ) return self._stubs["get_network_policy"] @@ -1104,7 +1111,8 @@ def delete_vmware_engine_network( def get_vmware_engine_network( self, ) -> Callable[ - [vmwareengine.GetVmwareEngineNetworkRequest], vmwareengine.VmwareEngineNetwork + [vmwareengine.GetVmwareEngineNetworkRequest], + vmwareengine_resources.VmwareEngineNetwork, ]: r"""Return a callable for the get vmware engine network method over gRPC. 
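All the `operation.from_gapic(...)` edits in `client.py` and `async_client.py` above change only the message type the long-running operation resolves to; the calling pattern is untouched. A sketch in line with the updated `sample_create_private_cloud` snippets, which now also set `management_cluster.cluster_id` (project, location, and CIDR values are placeholders):

```python
from google.cloud import vmwareengine_v1

client = vmwareengine_v1.VmwareEngineClient()

private_cloud = vmwareengine_v1.PrivateCloud()
private_cloud.network_config.management_cidr = "192.168.0.0/24"
private_cloud.management_cluster.cluster_id = "my-management-cluster"

operation = client.create_private_cloud(
    parent="projects/my-project/locations/us-central1-a",
    private_cloud=private_cloud,
    private_cloud_id="my-private-cloud",
)

# Blocks until the LRO completes; after this refactor the payload deserializes
# as vmwareengine_resources.PrivateCloud rather than vmwareengine.PrivateCloud.
response = operation.result()
print(response.name)
```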
@@ -1128,7 +1136,7 @@ def get_vmware_engine_network( self._stubs["get_vmware_engine_network"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetVmwareEngineNetwork", request_serializer=vmwareengine.GetVmwareEngineNetworkRequest.serialize, - response_deserializer=vmwareengine.VmwareEngineNetwork.deserialize, + response_deserializer=vmwareengine_resources.VmwareEngineNetwork.deserialize, ) return self._stubs["get_vmware_engine_network"] diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py index 03926778c20b..0fbcc9e488c9 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py @@ -26,7 +26,7 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources from .base import DEFAULT_CLIENT_INFO, VmwareEngineTransport from .grpc import VmwareEngineGrpcTransport @@ -286,7 +286,8 @@ def list_private_clouds( def get_private_cloud( self, ) -> Callable[ - [vmwareengine.GetPrivateCloudRequest], Awaitable[vmwareengine.PrivateCloud] + [vmwareengine.GetPrivateCloudRequest], + Awaitable[vmwareengine_resources.PrivateCloud], ]: r"""Return a callable for the get private cloud method over gRPC. @@ -306,7 +307,7 @@ def get_private_cloud( self._stubs["get_private_cloud"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetPrivateCloud", request_serializer=vmwareengine.GetPrivateCloudRequest.serialize, - response_deserializer=vmwareengine.PrivateCloud.deserialize, + response_deserializer=vmwareengine_resources.PrivateCloud.deserialize, ) return self._stubs["get_private_cloud"] @@ -487,7 +488,9 @@ def list_clusters( @property def get_cluster( self, - ) -> Callable[[vmwareengine.GetClusterRequest], Awaitable[vmwareengine.Cluster]]: + ) -> Callable[ + [vmwareengine.GetClusterRequest], Awaitable[vmwareengine_resources.Cluster] + ]: r"""Return a callable for the get cluster method over gRPC. Retrieves a ``Cluster`` resource by its resource name. @@ -506,7 +509,7 @@ def get_cluster( self._stubs["get_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetCluster", request_serializer=vmwareengine.GetClusterRequest.serialize, - response_deserializer=vmwareengine.Cluster.deserialize, + response_deserializer=vmwareengine_resources.Cluster.deserialize, ) return self._stubs["get_cluster"] @@ -667,7 +670,9 @@ def list_node_types( @property def get_node_type( self, - ) -> Callable[[vmwareengine.GetNodeTypeRequest], Awaitable[vmwareengine.NodeType]]: + ) -> Callable[ + [vmwareengine.GetNodeTypeRequest], Awaitable[vmwareengine_resources.NodeType] + ]: r"""Return a callable for the get node type method over gRPC. Gets details of a single ``NodeType``. 
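The `grpc_asyncio` transport changes that follow are likewise type-only. Worth noting alongside them: the `test_transport_close_async` test earlier in this diff pins down the context-manager contract for these generated clients, namely that the channel stays open inside the block and is closed exactly once on exit. In user code (anonymous credentials purely for the sketch):

```python
import asyncio

from google.auth.credentials import AnonymousCredentials
from google.cloud import vmwareengine_v1


async def main() -> None:
    # "async with" closes the underlying grpc.aio channel exactly once on exit,
    # matching the assertions in the transport-close tests above.
    async with vmwareengine_v1.VmwareEngineAsyncClient(
        credentials=AnonymousCredentials()
    ) as client:
        pass  # issue awaited RPCs here


asyncio.run(main())
```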
@@ -686,7 +691,7 @@ def get_node_type( self._stubs["get_node_type"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetNodeType", request_serializer=vmwareengine.GetNodeTypeRequest.serialize, - response_deserializer=vmwareengine.NodeType.deserialize, + response_deserializer=vmwareengine_resources.NodeType.deserialize, ) return self._stubs["get_node_type"] @@ -694,7 +699,8 @@ def get_node_type( def show_nsx_credentials( self, ) -> Callable[ - [vmwareengine.ShowNsxCredentialsRequest], Awaitable[vmwareengine.Credentials] + [vmwareengine.ShowNsxCredentialsRequest], + Awaitable[vmwareengine_resources.Credentials], ]: r"""Return a callable for the show nsx credentials method over gRPC. @@ -714,7 +720,7 @@ def show_nsx_credentials( self._stubs["show_nsx_credentials"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/ShowNsxCredentials", request_serializer=vmwareengine.ShowNsxCredentialsRequest.serialize, - response_deserializer=vmwareengine.Credentials.deserialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, ) return self._stubs["show_nsx_credentials"] @@ -723,7 +729,7 @@ def show_vcenter_credentials( self, ) -> Callable[ [vmwareengine.ShowVcenterCredentialsRequest], - Awaitable[vmwareengine.Credentials], + Awaitable[vmwareengine_resources.Credentials], ]: r"""Return a callable for the show vcenter credentials method over gRPC. @@ -743,7 +749,7 @@ def show_vcenter_credentials( self._stubs["show_vcenter_credentials"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/ShowVcenterCredentials", request_serializer=vmwareengine.ShowVcenterCredentialsRequest.serialize, - response_deserializer=vmwareengine.Credentials.deserialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, ) return self._stubs["show_vcenter_credentials"] @@ -868,7 +874,7 @@ def get_hcx_activation_key( self, ) -> Callable[ [vmwareengine.GetHcxActivationKeyRequest], - Awaitable[vmwareengine.HcxActivationKey], + Awaitable[vmwareengine_resources.HcxActivationKey], ]: r"""Return a callable for the get hcx activation key method over gRPC. @@ -888,7 +894,7 @@ def get_hcx_activation_key( self._stubs["get_hcx_activation_key"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetHcxActivationKey", request_serializer=vmwareengine.GetHcxActivationKeyRequest.serialize, - response_deserializer=vmwareengine.HcxActivationKey.deserialize, + response_deserializer=vmwareengine_resources.HcxActivationKey.deserialize, ) return self._stubs["get_hcx_activation_key"] @@ -896,7 +902,8 @@ def get_hcx_activation_key( def get_network_policy( self, ) -> Callable[ - [vmwareengine.GetNetworkPolicyRequest], Awaitable[vmwareengine.NetworkPolicy] + [vmwareengine.GetNetworkPolicyRequest], + Awaitable[vmwareengine_resources.NetworkPolicy], ]: r"""Return a callable for the get network policy method over gRPC. 
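The same holds for the pager annotations changed in `pagers.py` earlier: only the declared item type moves to `vmwareengine_resources`; iteration behavior is unchanged, with pages fetched lazily in both flavors. A usage sketch (the parent string is a placeholder):

```python
from google.cloud import vmwareengine_v1

parent = "projects/my-project/locations/us-central1-a"

# Sync pager: each yielded item is a vmwareengine_resources.PrivateCloud.
client = vmwareengine_v1.VmwareEngineClient()
for private_cloud in client.list_private_clouds(parent=parent):
    print(private_cloud.name)


async def list_async() -> None:
    # Async pager: the list call is awaited, then iterated with "async for".
    async_client = vmwareengine_v1.VmwareEngineAsyncClient()
    pager = await async_client.list_private_clouds(parent=parent)
    async for private_cloud in pager:
        print(private_cloud.name)
```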
@@ -916,7 +923,7 @@ def get_network_policy( self._stubs["get_network_policy"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetNetworkPolicy", request_serializer=vmwareengine.GetNetworkPolicyRequest.serialize, - response_deserializer=vmwareengine.NetworkPolicy.deserialize, + response_deserializer=vmwareengine_resources.NetworkPolicy.deserialize, ) return self._stubs["get_network_policy"] @@ -1149,7 +1156,7 @@ def get_vmware_engine_network( self, ) -> Callable[ [vmwareengine.GetVmwareEngineNetworkRequest], - Awaitable[vmwareengine.VmwareEngineNetwork], + Awaitable[vmwareengine_resources.VmwareEngineNetwork], ]: r"""Return a callable for the get vmware engine network method over gRPC. @@ -1173,7 +1180,7 @@ def get_vmware_engine_network( self._stubs["get_vmware_engine_network"] = self.grpc_channel.unary_unary( "/google.cloud.vmwareengine.v1.VmwareEngine/GetVmwareEngineNetwork", request_serializer=vmwareengine.GetVmwareEngineNetworkRequest.serialize, - response_deserializer=vmwareengine.VmwareEngineNetwork.deserialize, + response_deserializer=vmwareengine_resources.VmwareEngineNetwork.deserialize, ) return self._stubs["get_vmware_engine_network"] diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py index 4da0de23edf8..759175f11619 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import ( @@ -48,7 +48,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import VmwareEngineTransport @@ -552,7 +552,9 @@ def pre_get_cluster( """ return request, metadata - def post_get_cluster(self, response: vmwareengine.Cluster) -> vmwareengine.Cluster: + def post_get_cluster( + self, response: vmwareengine_resources.Cluster + ) -> vmwareengine_resources.Cluster: """Post-rpc interceptor for get_cluster Override in a subclass to manipulate the response @@ -574,8 +576,8 @@ def pre_get_hcx_activation_key( return request, metadata def post_get_hcx_activation_key( - self, response: vmwareengine.HcxActivationKey - ) -> vmwareengine.HcxActivationKey: + self, response: vmwareengine_resources.HcxActivationKey + ) -> vmwareengine_resources.HcxActivationKey: """Post-rpc interceptor for get_hcx_activation_key Override in a subclass to manipulate the response @@ -597,8 +599,8 @@ def pre_get_network_policy( return request, metadata def post_get_network_policy( - self, response: vmwareengine.NetworkPolicy - ) -> vmwareengine.NetworkPolicy: + self, response: vmwareengine_resources.NetworkPolicy + ) -> vmwareengine_resources.NetworkPolicy: """Post-rpc interceptor for get_network_policy Override in a subclass to manipulate the response @@ -620,8 +622,8 @@ def pre_get_node_type( return request, metadata def post_get_node_type( - self, response: 
vmwareengine.NodeType - ) -> vmwareengine.NodeType: + self, response: vmwareengine_resources.NodeType + ) -> vmwareengine_resources.NodeType: """Post-rpc interceptor for get_node_type Override in a subclass to manipulate the response @@ -643,8 +645,8 @@ def pre_get_private_cloud( return request, metadata def post_get_private_cloud( - self, response: vmwareengine.PrivateCloud - ) -> vmwareengine.PrivateCloud: + self, response: vmwareengine_resources.PrivateCloud + ) -> vmwareengine_resources.PrivateCloud: """Post-rpc interceptor for get_private_cloud Override in a subclass to manipulate the response @@ -666,8 +668,8 @@ def pre_get_vmware_engine_network( return request, metadata def post_get_vmware_engine_network( - self, response: vmwareengine.VmwareEngineNetwork - ) -> vmwareengine.VmwareEngineNetwork: + self, response: vmwareengine_resources.VmwareEngineNetwork + ) -> vmwareengine_resources.VmwareEngineNetwork: """Post-rpc interceptor for get_vmware_engine_network Override in a subclass to manipulate the response @@ -896,8 +898,8 @@ def pre_show_nsx_credentials( return request, metadata def post_show_nsx_credentials( - self, response: vmwareengine.Credentials - ) -> vmwareengine.Credentials: + self, response: vmwareengine_resources.Credentials + ) -> vmwareengine_resources.Credentials: """Post-rpc interceptor for show_nsx_credentials Override in a subclass to manipulate the response @@ -919,8 +921,8 @@ def pre_show_vcenter_credentials( return request, metadata def post_show_vcenter_credentials( - self, response: vmwareengine.Credentials - ) -> vmwareengine.Credentials: + self, response: vmwareengine_resources.Credentials + ) -> vmwareengine_resources.Credentials: """Post-rpc interceptor for show_vcenter_credentials Override in a subclass to manipulate the response @@ -1050,7 +1052,7 @@ def pre_get_location( self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]], - ) -> locations_pb2.Location: + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -1059,7 +1061,7 @@ def pre_get_location( return request, metadata def post_get_location( - self, response: locations_pb2.GetLocationRequest + self, response: locations_pb2.Location ) -> locations_pb2.Location: """Post-rpc interceptor for get_location @@ -1073,7 +1075,7 @@ def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> locations_pb2.ListLocationsResponse: + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -1082,7 +1084,7 @@ def pre_list_locations( return request, metadata def post_list_locations( - self, response: locations_pb2.ListLocationsRequest + self, response: locations_pb2.ListLocationsResponse ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations @@ -1096,7 +1098,7 @@ def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]], - ) -> policy_pb2.Policy: + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_iam_policy Override in a subclass to manipulate the request or metadata @@ -1104,9 +1106,7 @@ def pre_get_iam_policy( """ return request, metadata - def post_get_iam_policy( - self, response: iam_policy_pb2.GetIamPolicyRequest - ) -> policy_pb2.Policy: 
+ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy Override in a subclass to manipulate the response @@ -1119,7 +1119,7 @@ def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]], - ) -> policy_pb2.Policy: + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for set_iam_policy Override in a subclass to manipulate the request or metadata @@ -1127,9 +1127,7 @@ def pre_set_iam_policy( """ return request, metadata - def post_set_iam_policy( - self, response: iam_policy_pb2.SetIamPolicyRequest - ) -> policy_pb2.Policy: + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy Override in a subclass to manipulate the response @@ -1142,7 +1140,7 @@ def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]], - ) -> iam_policy_pb2.TestIamPermissionsResponse: + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for test_iam_permissions Override in a subclass to manipulate the request or metadata @@ -1151,7 +1149,7 @@ def pre_test_iam_permissions( return request, metadata def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsRequest + self, response: iam_policy_pb2.TestIamPermissionsResponse ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions @@ -1165,7 +1163,7 @@ def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> None: + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -1173,9 +1171,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: operations_pb2.DeleteOperationRequest - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -1188,7 +1184,7 @@ def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -1197,7 +1193,7 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.GetOperationRequest + self, response: operations_pb2.Operation ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -1211,7 +1207,7 @@ def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.ListOperationsResponse: + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -1220,7 +1216,7 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsRequest + self, response: operations_pb2.ListOperationsResponse ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -1378,7 +1374,7 @@ class 
_CreateCluster(VmwareEngineRestStub): def __hash__(self): return hash("CreateCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "clusterId": "", } @@ -1404,7 +1400,6 @@ def __call__( request (~.vmwareengine.CreateClusterRequest): The request object. Request message for [VmwareEngine.CreateCluster][google.cloud.vmwareengine.v1.VmwareEngine.CreateCluster] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1478,7 +1473,7 @@ class _CreateHcxActivationKey(VmwareEngineRestStub): def __hash__(self): return hash("CreateHcxActivationKey") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "hcxActivationKeyId": "", } @@ -1504,7 +1499,6 @@ def __call__( request (~.vmwareengine.CreateHcxActivationKeyRequest): The request object. Request message for [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1580,7 +1574,7 @@ class _CreateNetworkPolicy(VmwareEngineRestStub): def __hash__(self): return hash("CreateNetworkPolicy") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "networkPolicyId": "", } @@ -1606,7 +1600,6 @@ def __call__( request (~.vmwareengine.CreateNetworkPolicyRequest): The request object. Request message for [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1682,7 +1675,7 @@ class _CreatePrivateCloud(VmwareEngineRestStub): def __hash__(self): return hash("CreatePrivateCloud") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "privateCloudId": "", } @@ -1708,7 +1701,6 @@ def __call__( request (~.vmwareengine.CreatePrivateCloudRequest): The request object. Request message for [VmwareEngine.CreatePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateCloud] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1784,7 +1776,7 @@ class _CreateVmwareEngineNetwork(VmwareEngineRestStub): def __hash__(self): return hash("CreateVmwareEngineNetwork") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "vmwareEngineNetworkId": "", } @@ -1811,7 +1803,6 @@ def __call__( request (~.vmwareengine.CreateVmwareEngineNetworkRequest): The request object. Request message for [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1887,7 +1878,7 @@ class _DeleteCluster(VmwareEngineRestStub): def __hash__(self): return hash("DeleteCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1911,7 +1902,6 @@ def __call__( request (~.vmwareengine.DeleteClusterRequest): The request object. 
Request message for [VmwareEngine.DeleteCluster][google.cloud.vmwareengine.v1.VmwareEngine.DeleteCluster] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1976,7 +1966,7 @@ class _DeleteNetworkPolicy(VmwareEngineRestStub): def __hash__(self): return hash("DeleteNetworkPolicy") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2000,7 +1990,6 @@ def __call__( request (~.vmwareengine.DeleteNetworkPolicyRequest): The request object. Request message for [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2067,7 +2056,7 @@ class _DeletePrivateCloud(VmwareEngineRestStub): def __hash__(self): return hash("DeletePrivateCloud") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2091,7 +2080,6 @@ def __call__( request (~.vmwareengine.DeletePrivateCloudRequest): The request object. Request message for [VmwareEngine.DeletePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateCloud] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2158,7 +2146,7 @@ class _DeleteVmwareEngineNetwork(VmwareEngineRestStub): def __hash__(self): return hash("DeleteVmwareEngineNetwork") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2183,7 +2171,6 @@ def __call__( request (~.vmwareengine.DeleteVmwareEngineNetworkRequest): The request object. Request message for [VmwareEngine.DeleteVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2250,7 +2237,7 @@ class _GetCluster(VmwareEngineRestStub): def __hash__(self): return hash("GetCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2267,14 +2254,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Cluster: + ) -> vmwareengine_resources.Cluster: r"""Call the get cluster method over HTTP. Args: request (~.vmwareengine.GetClusterRequest): The request object. Request message for [VmwareEngine.GetCluster][google.cloud.vmwareengine.v1.VmwareEngine.GetCluster] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2282,7 +2268,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.Cluster: + ~.vmwareengine_resources.Cluster: A cluster in a private cloud. 
""" @@ -2327,8 +2313,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.Cluster() - pb_resp = vmwareengine.Cluster.pb(resp) + resp = vmwareengine_resources.Cluster() + pb_resp = vmwareengine_resources.Cluster.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) @@ -2338,7 +2324,7 @@ class _GetHcxActivationKey(VmwareEngineRestStub): def __hash__(self): return hash("GetHcxActivationKey") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2355,14 +2341,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.HcxActivationKey: + ) -> vmwareengine_resources.HcxActivationKey: r"""Call the get hcx activation key method over HTTP. Args: request (~.vmwareengine.GetHcxActivationKeyRequest): The request object. Request message for [VmwareEngine.GetHcxActivationKeys][] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2370,7 +2355,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.HcxActivationKey: + ~.vmwareengine_resources.HcxActivationKey: HCX activation key. A default key is created during private cloud provisioning, but this behavior is subject to change and you should always verify active keys. Use @@ -2424,8 +2409,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.HcxActivationKey() - pb_resp = vmwareengine.HcxActivationKey.pb(resp) + resp = vmwareengine_resources.HcxActivationKey() + pb_resp = vmwareengine_resources.HcxActivationKey.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_hcx_activation_key(resp) @@ -2435,7 +2420,7 @@ class _GetNetworkPolicy(VmwareEngineRestStub): def __hash__(self): return hash("GetNetworkPolicy") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2452,14 +2437,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.NetworkPolicy: + ) -> vmwareengine_resources.NetworkPolicy: r"""Call the get network policy method over HTTP. Args: request (~.vmwareengine.GetNetworkPolicyRequest): The request object. Request message for [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2467,7 +2451,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.NetworkPolicy: + ~.vmwareengine_resources.NetworkPolicy: Represents a network policy resource. Network policies are regional resources. 
You can use a network policy to enable @@ -2525,8 +2509,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.NetworkPolicy() - pb_resp = vmwareengine.NetworkPolicy.pb(resp) + resp = vmwareengine_resources.NetworkPolicy() + pb_resp = vmwareengine_resources.NetworkPolicy.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_network_policy(resp) @@ -2536,7 +2520,7 @@ class _GetNodeType(VmwareEngineRestStub): def __hash__(self): return hash("GetNodeType") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2553,14 +2537,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.NodeType: + ) -> vmwareengine_resources.NodeType: r"""Call the get node type method over HTTP. Args: request (~.vmwareengine.GetNodeTypeRequest): The request object. Request message for [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2568,7 +2551,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.NodeType: + ~.vmwareengine_resources.NodeType: Describes node type. """ @@ -2613,8 +2596,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.NodeType() - pb_resp = vmwareengine.NodeType.pb(resp) + resp = vmwareengine_resources.NodeType() + pb_resp = vmwareengine_resources.NodeType.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_node_type(resp) @@ -2624,7 +2607,7 @@ class _GetPrivateCloud(VmwareEngineRestStub): def __hash__(self): return hash("GetPrivateCloud") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2641,14 +2624,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.PrivateCloud: + ) -> vmwareengine_resources.PrivateCloud: r"""Call the get private cloud method over HTTP. Args: request (~.vmwareengine.GetPrivateCloudRequest): The request object. Request message for [VmwareEngine.GetPrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateCloud] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2656,7 +2638,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.PrivateCloud: + ~.vmwareengine_resources.PrivateCloud: Represents a private cloud resource. Private clouds are zonal resources. 
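The interceptor hunks earlier in this file fix two separate annotation bugs: every pre_* hook returns the (request, metadata) pair it received, and every post_* hook receives the parsed response message rather than the request. A sketch of a subclass written against the corrected signatures; the extra metadata entry and the print are illustrative only:

from typing import Sequence, Tuple

from google.cloud.vmwareengine_v1.services.vmware_engine.transports.rest import (
    VmwareEngineRestInterceptor,
    VmwareEngineRestTransport,
)
from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources


class AuditingInterceptor(VmwareEngineRestInterceptor):
    def pre_get_cluster(
        self,
        request: vmwareengine.GetClusterRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[vmwareengine.GetClusterRequest, Sequence[Tuple[str, str]]]:
        # Pre hooks hand back the (request, metadata) tuple, possibly edited.
        return request, tuple(metadata) + (("x-audit-tag", "demo"),)

    def post_get_cluster(
        self, response: vmwareengine_resources.Cluster
    ) -> vmwareengine_resources.Cluster:
        # Post hooks see the response message, a Cluster here.
        print("fetched cluster:", response.name)
        return response


transport = VmwareEngineRestTransport(interceptor=AuditingInterceptor())

The transport can then be handed to the client as usual, e.g. VmwareEngineClient(transport=transport).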
@@ -2705,8 +2687,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.PrivateCloud() - pb_resp = vmwareengine.PrivateCloud.pb(resp) + resp = vmwareengine_resources.PrivateCloud() + pb_resp = vmwareengine_resources.PrivateCloud.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_private_cloud(resp) @@ -2716,7 +2698,7 @@ class _GetVmwareEngineNetwork(VmwareEngineRestStub): def __hash__(self): return hash("GetVmwareEngineNetwork") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2733,14 +2715,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.VmwareEngineNetwork: + ) -> vmwareengine_resources.VmwareEngineNetwork: r"""Call the get vmware engine network method over HTTP. Args: request (~.vmwareengine.GetVmwareEngineNetworkRequest): The request object. Request message for [VmwareEngine.GetVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2748,7 +2729,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.VmwareEngineNetwork: + ~.vmwareengine_resources.VmwareEngineNetwork: VMware Engine network resource that provides connectivity for VMware Engine private clouds. @@ -2798,8 +2779,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.VmwareEngineNetwork() - pb_resp = vmwareengine.VmwareEngineNetwork.pb(resp) + resp = vmwareengine_resources.VmwareEngineNetwork() + pb_resp = vmwareengine_resources.VmwareEngineNetwork.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_vmware_engine_network(resp) @@ -2809,7 +2790,7 @@ class _ListClusters(VmwareEngineRestStub): def __hash__(self): return hash("ListClusters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2833,7 +2814,6 @@ def __call__( request (~.vmwareengine.ListClustersRequest): The request object. Request message for [VmwareEngine.ListClusters][google.cloud.vmwareengine.v1.VmwareEngine.ListClusters] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2899,7 +2879,7 @@ class _ListHcxActivationKeys(VmwareEngineRestStub): def __hash__(self): return hash("ListHcxActivationKeys") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2923,7 +2903,6 @@ def __call__( request (~.vmwareengine.ListHcxActivationKeysRequest): The request object. Request message for [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
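Because each REST __call__ above now both returns and parses vmwareengine_resources messages, synchronous callers receive the relocated types directly. A small sketch with placeholder project and resource IDs:

from google.cloud import vmwareengine_v1
from google.cloud.vmwareengine_v1.types import vmwareengine_resources

client = vmwareengine_v1.VmwareEngineClient(transport="rest")
cloud = client.get_private_cloud(
    name="projects/my-project/locations/us-central1-a/privateClouds/my-cloud"
)
# The same message as before the split, now under its new module path.
assert isinstance(cloud, vmwareengine_resources.PrivateCloud)
print(cloud.state, cloud.uid)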
@@ -2991,7 +2970,7 @@ class _ListNetworkPolicies(VmwareEngineRestStub): def __hash__(self): return hash("ListNetworkPolicies") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3015,7 +2994,6 @@ def __call__( request (~.vmwareengine.ListNetworkPoliciesRequest): The request object. Request message for [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3083,7 +3061,7 @@ class _ListNodeTypes(VmwareEngineRestStub): def __hash__(self): return hash("ListNodeTypes") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3107,7 +3085,6 @@ def __call__( request (~.vmwareengine.ListNodeTypesRequest): The request object. Request message for [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3173,7 +3150,7 @@ class _ListPrivateClouds(VmwareEngineRestStub): def __hash__(self): return hash("ListPrivateClouds") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3197,7 +3174,6 @@ def __call__( request (~.vmwareengine.ListPrivateCloudsRequest): The request object. Request message for [VmwareEngine.ListPrivateClouds][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3265,7 +3241,7 @@ class _ListSubnets(VmwareEngineRestStub): def __hash__(self): return hash("ListSubnets") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3289,7 +3265,6 @@ def __call__( request (~.vmwareengine.ListSubnetsRequest): The request object. Request message for [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3355,7 +3330,7 @@ class _ListVmwareEngineNetworks(VmwareEngineRestStub): def __hash__(self): return hash("ListVmwareEngineNetworks") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3380,7 +3355,6 @@ def __call__( request (~.vmwareengine.ListVmwareEngineNetworksRequest): The request object. Request message for [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
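The List* stubs change only their required-fields annotation; the paged responses, rebuilt further down in this diff to carry vmwareengine_resources elements, still iterate transparently. For example (the parent is a placeholder):

from google.cloud import vmwareengine_v1

client = vmwareengine_v1.VmwareEngineClient()
# The pager yields vmwareengine_resources.NodeType messages and follows
# next_page_token behind the scenes.
for node_type in client.list_node_types(
    parent="projects/my-project/locations/us-central1-a"
):
    print(node_type.node_type_id, node_type.total_core_count)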
@@ -3448,7 +3422,7 @@ class _ResetNsxCredentials(VmwareEngineRestStub): def __hash__(self): return hash("ResetNsxCredentials") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3472,7 +3446,6 @@ def __call__( request (~.vmwareengine.ResetNsxCredentialsRequest): The request object. Request message for [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3548,7 +3521,7 @@ class _ResetVcenterCredentials(VmwareEngineRestStub): def __hash__(self): return hash("ResetVcenterCredentials") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3572,7 +3545,6 @@ def __call__( request (~.vmwareengine.ResetVcenterCredentialsRequest): The request object. Request message for [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3648,7 +3620,7 @@ class _ShowNsxCredentials(VmwareEngineRestStub): def __hash__(self): return hash("ShowNsxCredentials") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3665,14 +3637,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Credentials: + ) -> vmwareengine_resources.Credentials: r"""Call the show nsx credentials method over HTTP. Args: request (~.vmwareengine.ShowNsxCredentialsRequest): The request object. Request message for [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3680,7 +3651,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.Credentials: + ~.vmwareengine_resources.Credentials: Credentials for a private cloud. """ @@ -3727,8 +3698,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.Credentials() - pb_resp = vmwareengine.Credentials.pb(resp) + resp = vmwareengine_resources.Credentials() + pb_resp = vmwareengine_resources.Credentials.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_show_nsx_credentials(resp) @@ -3738,7 +3709,7 @@ class _ShowVcenterCredentials(VmwareEngineRestStub): def __hash__(self): return hash("ShowVcenterCredentials") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3755,14 +3726,13 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.Credentials: + ) -> vmwareengine_resources.Credentials: r"""Call the show vcenter credentials method over HTTP. 
Args: request (~.vmwareengine.ShowVcenterCredentialsRequest): The request object. Request message for [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3770,7 +3740,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.Credentials: + ~.vmwareengine_resources.Credentials: Credentials for a private cloud. """ @@ -3817,8 +3787,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.Credentials() - pb_resp = vmwareengine.Credentials.pb(resp) + resp = vmwareengine_resources.Credentials() + pb_resp = vmwareengine_resources.Credentials.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_show_vcenter_credentials(resp) @@ -3828,7 +3798,7 @@ class _UndeletePrivateCloud(VmwareEngineRestStub): def __hash__(self): return hash("UndeletePrivateCloud") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3852,7 +3822,6 @@ def __call__( request (~.vmwareengine.UndeletePrivateCloudRequest): The request object. Request message for [VmwareEngine.UndeletePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.UndeletePrivateCloud] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3928,7 +3897,7 @@ class _UpdateCluster(VmwareEngineRestStub): def __hash__(self): return hash("UpdateCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, } @@ -3954,7 +3923,6 @@ def __call__( request (~.vmwareengine.UpdateClusterRequest): The request object. Request message for [VmwareEngine.UpdateCluster][google.cloud.vmwareengine.v1.VmwareEngine.UpdateCluster] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4028,7 +3996,7 @@ class _UpdateNetworkPolicy(VmwareEngineRestStub): def __hash__(self): return hash("UpdateNetworkPolicy") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, } @@ -4054,7 +4022,6 @@ def __call__( request (~.vmwareengine.UpdateNetworkPolicyRequest): The request object. Request message for [VmwareEngine.UpdateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPolicy] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4130,7 +4097,7 @@ class _UpdatePrivateCloud(VmwareEngineRestStub): def __hash__(self): return hash("UpdatePrivateCloud") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, } @@ -4156,7 +4123,6 @@ def __call__( request (~.vmwareengine.UpdatePrivateCloudRequest): The request object. Request message for [VmwareEngine.UpdatePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.UpdatePrivateCloud] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
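The recurring retype of __REQUIRED_FIELDS_DEFAULT_VALUES from Dict[str, str] to Dict[str, Any] is what the Update* hunks above actually require: "updateMask" defaults to an empty message rendered as {}, not a string. Reduced to its essence:

from typing import Any, Dict

# Dict[str, str] was only ever accurate while every default was a string ID:
create_defaults: Dict[str, Any] = {"privateCloudId": ""}

# Update methods also carry a message-typed default, so the value type must
# widen to Any:
update_defaults: Dict[str, Any] = {"updateMask": {}}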
@@ -4232,7 +4198,7 @@ class _UpdateVmwareEngineNetwork(VmwareEngineRestStub): def __hash__(self): return hash("UpdateVmwareEngineNetwork") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, } @@ -4259,7 +4225,6 @@ def __call__( request (~.vmwareengine.UpdateVmwareEngineNetworkRequest): The request object. Request message for [VmwareEngine.UpdateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.UpdateVmwareEngineNetwork] - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4412,7 +4377,7 @@ def delete_vmware_engine_network( @property def get_cluster( self, - ) -> Callable[[vmwareengine.GetClusterRequest], vmwareengine.Cluster]: + ) -> Callable[[vmwareengine.GetClusterRequest], vmwareengine_resources.Cluster]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore @@ -4421,7 +4386,8 @@ def get_cluster( def get_hcx_activation_key( self, ) -> Callable[ - [vmwareengine.GetHcxActivationKeyRequest], vmwareengine.HcxActivationKey + [vmwareengine.GetHcxActivationKeyRequest], + vmwareengine_resources.HcxActivationKey, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast @@ -4430,7 +4396,9 @@ def get_hcx_activation_key( @property def get_network_policy( self, - ) -> Callable[[vmwareengine.GetNetworkPolicyRequest], vmwareengine.NetworkPolicy]: + ) -> Callable[ + [vmwareengine.GetNetworkPolicyRequest], vmwareengine_resources.NetworkPolicy + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetNetworkPolicy(self._session, self._host, self._interceptor) # type: ignore @@ -4438,7 +4406,7 @@ def get_network_policy( @property def get_node_type( self, - ) -> Callable[[vmwareengine.GetNodeTypeRequest], vmwareengine.NodeType]: + ) -> Callable[[vmwareengine.GetNodeTypeRequest], vmwareengine_resources.NodeType]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetNodeType(self._session, self._host, self._interceptor) # type: ignore @@ -4446,7 +4414,9 @@ def get_node_type( @property def get_private_cloud( self, - ) -> Callable[[vmwareengine.GetPrivateCloudRequest], vmwareengine.PrivateCloud]: + ) -> Callable[ + [vmwareengine.GetPrivateCloudRequest], vmwareengine_resources.PrivateCloud + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetPrivateCloud(self._session, self._host, self._interceptor) # type: ignore @@ -4455,7 +4425,8 @@ def get_private_cloud( def get_vmware_engine_network( self, ) -> Callable[ - [vmwareengine.GetVmwareEngineNetworkRequest], vmwareengine.VmwareEngineNetwork + [vmwareengine.GetVmwareEngineNetworkRequest], + vmwareengine_resources.VmwareEngineNetwork, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -4553,7 +4524,9 @@ def reset_vcenter_credentials( @property def show_nsx_credentials( self, - ) -> Callable[[vmwareengine.ShowNsxCredentialsRequest], vmwareengine.Credentials]: + ) -> Callable[ + [vmwareengine.ShowNsxCredentialsRequest], vmwareengine_resources.Credentials + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._ShowNsxCredentials(self._session, self._host, self._interceptor) # type: ignore @@ -4562,7 +4535,7 @@ def show_nsx_credentials( def show_vcenter_credentials( self, ) -> Callable[ - [vmwareengine.ShowVcenterCredentialsRequest], vmwareengine.Credentials + [vmwareengine.ShowVcenterCredentialsRequest], vmwareengine_resources.Credentials ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py index e304cdd952e1..2c93b9370387 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py @@ -14,13 +14,11 @@ # limitations under the License. # from .vmwareengine import ( - Cluster, CreateClusterRequest, CreateHcxActivationKeyRequest, CreateNetworkPolicyRequest, CreatePrivateCloudRequest, CreateVmwareEngineNetworkRequest, - Credentials, DeleteClusterRequest, DeleteNetworkPolicyRequest, DeletePrivateCloudRequest, @@ -31,8 +29,6 @@ GetNodeTypeRequest, GetPrivateCloudRequest, GetVmwareEngineNetworkRequest, - Hcx, - HcxActivationKey, ListClustersRequest, ListClustersResponse, ListHcxActivationKeysRequest, @@ -47,35 +43,39 @@ ListSubnetsResponse, ListVmwareEngineNetworksRequest, ListVmwareEngineNetworksResponse, - NetworkConfig, - NetworkPolicy, - NodeType, - NodeTypeConfig, - Nsx, OperationMetadata, - PrivateCloud, ResetNsxCredentialsRequest, ResetVcenterCredentialsRequest, ShowNsxCredentialsRequest, ShowVcenterCredentialsRequest, - Subnet, UndeletePrivateCloudRequest, UpdateClusterRequest, UpdateNetworkPolicyRequest, UpdatePrivateCloudRequest, UpdateVmwareEngineNetworkRequest, +) +from .vmwareengine_resources import ( + Cluster, + Credentials, + Hcx, + HcxActivationKey, + NetworkConfig, + NetworkPolicy, + NodeType, + NodeTypeConfig, + Nsx, + PrivateCloud, + Subnet, Vcenter, VmwareEngineNetwork, ) __all__ = ( - "Cluster", "CreateClusterRequest", "CreateHcxActivationKeyRequest", "CreateNetworkPolicyRequest", "CreatePrivateCloudRequest", "CreateVmwareEngineNetworkRequest", - "Credentials", "DeleteClusterRequest", "DeleteNetworkPolicyRequest", "DeletePrivateCloudRequest", @@ -86,8 +86,6 @@ "GetNodeTypeRequest", "GetPrivateCloudRequest", "GetVmwareEngineNetworkRequest", - "Hcx", - "HcxActivationKey", "ListClustersRequest", "ListClustersResponse", "ListHcxActivationKeysRequest", @@ -102,23 +100,27 @@ "ListSubnetsResponse", "ListVmwareEngineNetworksRequest", "ListVmwareEngineNetworksResponse", - "NetworkConfig", - "NetworkPolicy", - "NodeType", - "NodeTypeConfig", - "Nsx", "OperationMetadata", - "PrivateCloud", "ResetNsxCredentialsRequest", "ResetVcenterCredentialsRequest", "ShowNsxCredentialsRequest", "ShowVcenterCredentialsRequest", - "Subnet", "UndeletePrivateCloudRequest", "UpdateClusterRequest", "UpdateNetworkPolicyRequest", "UpdatePrivateCloudRequest", 
"UpdateVmwareEngineNetworkRequest", + "Cluster", + "Credentials", + "Hcx", + "HcxActivationKey", + "NetworkConfig", + "NetworkPolicy", + "NodeType", + "NodeTypeConfig", + "Nsx", + "PrivateCloud", + "Subnet", "Vcenter", "VmwareEngineNetwork", ) diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py index 0de0990b9a97..7bc03b54c594 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py @@ -13,18 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.vmwareengine_v1.types import vmwareengine_resources + __protobuf__ = proto.module( package="google.cloud.vmwareengine.v1", manifest={ - "NetworkConfig", - "NodeTypeConfig", - "PrivateCloud", "ListPrivateCloudsRequest", "ListPrivateCloudsResponse", "GetPrivateCloudRequest", @@ -32,42 +33,32 @@ "UpdatePrivateCloudRequest", "DeletePrivateCloudRequest", "UndeletePrivateCloudRequest", - "Cluster", "ListClustersRequest", "ListClustersResponse", "GetClusterRequest", "CreateClusterRequest", "UpdateClusterRequest", "DeleteClusterRequest", - "Subnet", "ListSubnetsRequest", "ListSubnetsResponse", "OperationMetadata", - "NodeType", "ListNodeTypesRequest", "ListNodeTypesResponse", "GetNodeTypeRequest", - "Credentials", "ShowNsxCredentialsRequest", "ShowVcenterCredentialsRequest", "ResetNsxCredentialsRequest", "ResetVcenterCredentialsRequest", "ListHcxActivationKeysResponse", - "HcxActivationKey", "ListHcxActivationKeysRequest", "GetHcxActivationKeyRequest", "CreateHcxActivationKeyRequest", - "Hcx", - "Nsx", - "Vcenter", - "NetworkPolicy", "ListNetworkPoliciesRequest", "ListNetworkPoliciesResponse", "GetNetworkPolicyRequest", "UpdateNetworkPolicyRequest", "CreateNetworkPolicyRequest", "DeleteNetworkPolicyRequest", - "VmwareEngineNetwork", "CreateVmwareEngineNetworkRequest", "UpdateVmwareEngineNetworkRequest", "DeleteVmwareEngineNetworkRequest", @@ -78,263 +69,6 @@ ) -class NetworkConfig(proto.Message): - r"""Network configuration in the consumer project - with which the peering has to be done. - - Attributes: - management_cidr (str): - Required. Management CIDR used by VMware - management appliances. - vmware_engine_network (str): - Optional. The relative resource name of the VMware Engine - network attached to the private cloud. Specify the name in - the following form: - ``projects/{project}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` - where ``{project}`` can either be a project number or a - project ID. - vmware_engine_network_canonical (str): - Output only. The canonical name of the VMware Engine network - in the form: - ``projects/{project_number}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` - management_ip_address_layout_version (int): - Output only. The IP address layout version of the management - IP address range. Possible versions include: - - - ``managementIpAddressLayoutVersion=1``: Indicates the - legacy IP address layout used by some existing private - clouds. 
This is no longer supported for new private - clouds as it does not support all features. - - ``managementIpAddressLayoutVersion=2``: Indicates the - latest IP address layout used by all newly created - private clouds. This version supports all current - features. - """ - - management_cidr: str = proto.Field( - proto.STRING, - number=4, - ) - vmware_engine_network: str = proto.Field( - proto.STRING, - number=5, - ) - vmware_engine_network_canonical: str = proto.Field( - proto.STRING, - number=6, - ) - management_ip_address_layout_version: int = proto.Field( - proto.INT32, - number=8, - ) - - -class NodeTypeConfig(proto.Message): - r"""Information about the type and number of nodes associated - with the cluster. - - Attributes: - node_count (int): - Required. The number of nodes of this type in - the cluster - custom_core_count (int): - Optional. Customized number of cores available to each node - of the type. This number must always be one of - ``nodeType.availableCustomCoreCounts``. If zero is provided - max value from ``nodeType.availableCustomCoreCounts`` will - be used. - """ - - node_count: int = proto.Field( - proto.INT32, - number=1, - ) - custom_core_count: int = proto.Field( - proto.INT32, - number=2, - ) - - -class PrivateCloud(proto.Message): - r"""Represents a private cloud resource. Private clouds are zonal - resources. - - Attributes: - name (str): - Output only. The resource name of this private cloud. - Resource names are schemeless URIs that follow the - conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Creation time of this resource. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update time of this - resource. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the resource was - scheduled for deletion. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the resource will be - irreversibly deleted. - state (google.cloud.vmwareengine_v1.types.PrivateCloud.State): - Output only. State of the resource. New - values may be added to this enum when - appropriate. - network_config (google.cloud.vmwareengine_v1.types.NetworkConfig): - Required. Network configuration of the - private cloud. - management_cluster (google.cloud.vmwareengine_v1.types.PrivateCloud.ManagementCluster): - Input only. The management cluster for this private cloud. - This field is required during creation of the private cloud - to provide details for the default cluster. - - The following fields can't be changed after private cloud - creation: ``ManagementCluster.clusterId``, - ``ManagementCluster.nodeTypeId``. - description (str): - User-provided description for this private - cloud. - hcx (google.cloud.vmwareengine_v1.types.Hcx): - Output only. HCX appliance. - nsx (google.cloud.vmwareengine_v1.types.Nsx): - Output only. NSX appliance. - vcenter (google.cloud.vmwareengine_v1.types.Vcenter): - Output only. Vcenter appliance. - uid (str): - Output only. System-generated unique - identifier for the resource. - """ - - class State(proto.Enum): - r"""Enum State defines possible states of private clouds. - - Values: - STATE_UNSPECIFIED (0): - The default value. This value should never be - used. - ACTIVE (1): - The private cloud is ready. - CREATING (2): - The private cloud is being created. - UPDATING (3): - The private cloud is being updated. 
- FAILED (5): - The private cloud is in failed state. - DELETED (6): - The private cloud is scheduled for deletion. - The deletion process can be cancelled by using - the corresponding undelete method. - PURGING (7): - The private cloud is irreversibly deleted and - is being removed from the system. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - UPDATING = 3 - FAILED = 5 - DELETED = 6 - PURGING = 7 - - class ManagementCluster(proto.Message): - r"""Management cluster configuration. - - Attributes: - cluster_id (str): - Required. The user-provided identifier of the new - ``Cluster``. The identifier must meet the following - requirements: - - - Only contains 1-63 alphanumeric characters and hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) - node_type_configs (MutableMapping[str, google.cloud.vmwareengine_v1.types.NodeTypeConfig]): - Required. The map of cluster node types in this cluster, - where the key is canonical identifier of the node type - (corresponds to the ``NodeType``). - """ - - cluster_id: str = proto.Field( - proto.STRING, - number=1, - ) - node_type_configs: MutableMapping[str, "NodeTypeConfig"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=7, - message="NodeTypeConfig", - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - network_config: "NetworkConfig" = proto.Field( - proto.MESSAGE, - number=9, - message="NetworkConfig", - ) - management_cluster: ManagementCluster = proto.Field( - proto.MESSAGE, - number=10, - message=ManagementCluster, - ) - description: str = proto.Field( - proto.STRING, - number=11, - ) - hcx: "Hcx" = proto.Field( - proto.MESSAGE, - number=17, - message="Hcx", - ) - nsx: "Nsx" = proto.Field( - proto.MESSAGE, - number=18, - message="Nsx", - ) - vcenter: "Vcenter" = proto.Field( - proto.MESSAGE, - number=19, - message="Vcenter", - ) - uid: str = proto.Field( - proto.STRING, - number=20, - ) - - class ListPrivateCloudsRequest(proto.Message): r"""Request message for [VmwareEngine.ListPrivateClouds][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds] @@ -443,10 +177,12 @@ class ListPrivateCloudsResponse(proto.Message): def raw_page(self): return self - private_clouds: MutableSequence["PrivateCloud"] = proto.RepeatedField( + private_clouds: MutableSequence[ + vmwareengine_resources.PrivateCloud + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="PrivateCloud", + message=vmwareengine_resources.PrivateCloud, ) next_page_token: str = proto.Field( proto.STRING, @@ -524,10 +260,10 @@ class CreatePrivateCloudRequest(proto.Message): proto.STRING, number=2, ) - private_cloud: "PrivateCloud" = proto.Field( + private_cloud: vmwareengine_resources.PrivateCloud = proto.Field( proto.MESSAGE, number=3, - message="PrivateCloud", + message=vmwareengine_resources.PrivateCloud, ) request_id: str = proto.Field( proto.STRING, @@ -560,10 +296,10 @@ class 
UpdatePrivateCloudRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - private_cloud: "PrivateCloud" = proto.Field( + private_cloud: vmwareengine_resources.PrivateCloud = proto.Field( proto.MESSAGE, number=1, - message="PrivateCloud", + message=vmwareengine_resources.PrivateCloud, ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -668,100 +404,6 @@ class UndeletePrivateCloudRequest(proto.Message): ) -class Cluster(proto.Message): - r"""A cluster in a private cloud. - - Attributes: - name (str): - Output only. The resource name of this cluster. Resource - names are schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/clusters/my-cluster`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Creation time of this resource. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update time of this - resource. - state (google.cloud.vmwareengine_v1.types.Cluster.State): - Output only. State of the resource. - management (bool): - Output only. True if the cluster is a - management cluster; false otherwise. There can - only be one management cluster in a private - cloud and it has to be the first one. - uid (str): - Output only. System-generated unique - identifier for the resource. - node_type_configs (MutableMapping[str, google.cloud.vmwareengine_v1.types.NodeTypeConfig]): - Required. The map of cluster node types in this cluster, - where the key is canonical identifier of the node type - (corresponds to the ``NodeType``). - """ - - class State(proto.Enum): - r"""Enum State defines possible states of private cloud clusters. - - Values: - STATE_UNSPECIFIED (0): - The default value. This value should never be - used. - ACTIVE (1): - The Cluster is operational and can be used by - the user. - CREATING (2): - The Cluster is being deployed. - UPDATING (3): - Adding or removing of a node to the cluster, - any other cluster specific updates. - DELETING (4): - The Cluster is being deleted. - REPAIRING (5): - The Cluster is undergoing maintenance, for - example: a failed node is getting replaced. 
- """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - UPDATING = 3 - DELETING = 4 - REPAIRING = 5 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - management: bool = proto.Field( - proto.BOOL, - number=7, - ) - uid: str = proto.Field( - proto.STRING, - number=14, - ) - node_type_configs: MutableMapping[str, "NodeTypeConfig"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=16, - message="NodeTypeConfig", - ) - - class ListClustersRequest(proto.Message): r"""Request message for [VmwareEngine.ListClusters][google.cloud.vmwareengine.v1.VmwareEngine.ListClusters] @@ -854,10 +496,10 @@ class ListClustersResponse(proto.Message): def raw_page(self): return self - clusters: MutableSequence["Cluster"] = proto.RepeatedField( + clusters: MutableSequence[vmwareengine_resources.Cluster] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Cluster", + message=vmwareengine_resources.Cluster, ) next_page_token: str = proto.Field( proto.STRING, @@ -934,10 +576,10 @@ class CreateClusterRequest(proto.Message): proto.STRING, number=2, ) - cluster: "Cluster" = proto.Field( + cluster: vmwareengine_resources.Cluster = proto.Field( proto.MESSAGE, number=3, - message="Cluster", + message=vmwareengine_resources.Cluster, ) request_id: str = proto.Field( proto.STRING, @@ -978,10 +620,10 @@ class UpdateClusterRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - cluster: "Cluster" = proto.Field( + cluster: vmwareengine_resources.Cluster = proto.Field( proto.MESSAGE, number=2, - message="Cluster", + message=vmwareengine_resources.Cluster, ) request_id: str = proto.Field( proto.STRING, @@ -1022,76 +664,6 @@ class DeleteClusterRequest(proto.Message): ) -class Subnet(proto.Message): - r"""Subnet in a private cloud. Either ``management`` subnets (such as - vMotion) that are read-only, or ``userDefined``, which can also be - updated. - - Attributes: - name (str): - Output only. The resource name of this subnet. Resource - names are schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` - ip_cidr_range (str): - The IP address range of the subnet in CIDR - format '10.0.0.0/24'. - gateway_ip (str): - The IP address of the gateway of this subnet. - Must fall within the IP prefix defined above. - type_ (str): - Output only. The type of the subnet. For - example "management" or "userDefined". - state (google.cloud.vmwareengine_v1.types.Subnet.State): - Output only. The state of the resource. - """ - - class State(proto.Enum): - r"""Defines possible states of subnets. - - Values: - STATE_UNSPECIFIED (0): - The default value. This value should never be - used. - ACTIVE (1): - The subnet is ready. - CREATING (2): - The subnet is being created. - UPDATING (3): - The subnet is being updated. - DELETING (4): - The subnet is being deleted. 
- """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - UPDATING = 3 - DELETING = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - ip_cidr_range: str = proto.Field( - proto.STRING, - number=7, - ) - gateway_ip: str = proto.Field( - proto.STRING, - number=8, - ) - type_: str = proto.Field( - proto.STRING, - number=11, - ) - state: State = proto.Field( - proto.ENUM, - number=13, - enum=State, - ) - - class ListSubnetsRequest(proto.Message): r"""Request message for [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] @@ -1150,10 +722,10 @@ class ListSubnetsResponse(proto.Message): def raw_page(self): return self - subnets: MutableSequence["Subnet"] = proto.RepeatedField( + subnets: MutableSequence[vmwareengine_resources.Subnet] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Subnet", + message=vmwareengine_resources.Subnet, ) next_page_token: str = proto.Field( proto.STRING, @@ -1223,73 +795,6 @@ class OperationMetadata(proto.Message): ) -class NodeType(proto.Message): - r"""Describes node type. - - Attributes: - name (str): - Output only. The resource name of this node type. Resource - names are schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` - node_type_id (str): - Output only. The canonical identifier of the node type - (corresponds to the ``NodeType``). For example: standard-72. - display_name (str): - Output only. The friendly name for this node - type. For example: ve1-standard-72 - virtual_cpu_count (int): - Output only. The total number of virtual CPUs - in a single node. - total_core_count (int): - Output only. The total number of CPU cores in - a single node. - memory_gb (int): - Output only. The amount of physical memory - available, defined in GB. - disk_size_gb (int): - Output only. The amount of storage available, - defined in GB. - available_custom_core_counts (MutableSequence[int]): - Output only. List of possible values of - custom core count. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - node_type_id: str = proto.Field( - proto.STRING, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - virtual_cpu_count: int = proto.Field( - proto.INT32, - number=4, - ) - total_core_count: int = proto.Field( - proto.INT32, - number=5, - ) - memory_gb: int = proto.Field( - proto.INT32, - number=7, - ) - disk_size_gb: int = proto.Field( - proto.INT32, - number=8, - ) - available_custom_core_counts: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=11, - ) - - class ListNodeTypesRequest(proto.Message): r"""Request message for [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] @@ -1382,10 +887,10 @@ class ListNodeTypesResponse(proto.Message): def raw_page(self): return self - node_types: MutableSequence["NodeType"] = proto.RepeatedField( + node_types: MutableSequence[vmwareengine_resources.NodeType] = proto.RepeatedField( proto.MESSAGE, number=1, - message="NodeType", + message=vmwareengine_resources.NodeType, ) next_page_token: str = proto.Field( proto.STRING, @@ -1417,26 +922,6 @@ class GetNodeTypeRequest(proto.Message): ) -class Credentials(proto.Message): - r"""Credentials for a private cloud. - - Attributes: - username (str): - Initial username. - password (str): - Initial password. 
- """ - - username: str = proto.Field( - proto.STRING, - number=1, - ) - password: str = proto.Field( - proto.STRING, - number=2, - ) - - class ShowNsxCredentialsRequest(proto.Message): r"""Request message for [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] @@ -1585,10 +1070,12 @@ class ListHcxActivationKeysResponse(proto.Message): def raw_page(self): return self - hcx_activation_keys: MutableSequence["HcxActivationKey"] = proto.RepeatedField( + hcx_activation_keys: MutableSequence[ + vmwareengine_resources.HcxActivationKey + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="HcxActivationKey", + message=vmwareengine_resources.HcxActivationKey, ) next_page_token: str = proto.Field( proto.STRING, @@ -1600,78 +1087,6 @@ def raw_page(self): ) -class HcxActivationKey(proto.Message): - r"""HCX activation key. A default key is created during private cloud - provisioning, but this behavior is subject to change and you should - always verify active keys. Use - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - to retrieve existing keys and - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - to create new ones. - - Attributes: - name (str): - Output only. The resource name of this HcxActivationKey. - Resource names are schemeless URIs that follow the - conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Creation time of HCX activation - key. - state (google.cloud.vmwareengine_v1.types.HcxActivationKey.State): - Output only. State of HCX activation key. - activation_key (str): - Output only. HCX activation key. - uid (str): - Output only. System-generated unique - identifier for the resource. - """ - - class State(proto.Enum): - r"""State of HCX activation key - - Values: - STATE_UNSPECIFIED (0): - Unspecified state. - AVAILABLE (1): - State of a newly generated activation key. - CONSUMED (2): - State of key when it has been used to - activate HCX appliance. - CREATING (3): - State of key when it is being created. - """ - STATE_UNSPECIFIED = 0 - AVAILABLE = 1 - CONSUMED = 2 - CREATING = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=3, - enum=State, - ) - activation_key: str = proto.Field( - proto.STRING, - number=4, - ) - uid: str = proto.Field( - proto.STRING, - number=5, - ) - - class ListHcxActivationKeysRequest(proto.Message): r"""Request message for [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] @@ -1789,10 +1204,10 @@ class CreateHcxActivationKeyRequest(proto.Message): proto.STRING, number=1, ) - hcx_activation_key: "HcxActivationKey" = proto.Field( + hcx_activation_key: vmwareengine_resources.HcxActivationKey = proto.Field( proto.MESSAGE, number=2, - message="HcxActivationKey", + message=vmwareengine_resources.HcxActivationKey, ) hcx_activation_key_id: str = proto.Field( proto.STRING, @@ -1804,301 +1219,6 @@ class CreateHcxActivationKeyRequest(proto.Message): ) -class Hcx(proto.Message): - r"""Details about a HCX Cloud Manager appliance. 
- - Attributes: - internal_ip (str): - Internal IP address of the appliance. - version (str): - Version of the appliance. - state (google.cloud.vmwareengine_v1.types.Hcx.State): - Output only. The state of the appliance. - fqdn (str): - Fully qualified domain name of the appliance. - """ - - class State(proto.Enum): - r"""State of the appliance - - Values: - STATE_UNSPECIFIED (0): - Unspecified appliance state. This is the - default value. - ACTIVE (1): - The appliance is operational and can be used. - CREATING (2): - The appliance is being deployed. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - - internal_ip: str = proto.Field( - proto.STRING, - number=2, - ) - version: str = proto.Field( - proto.STRING, - number=4, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - fqdn: str = proto.Field( - proto.STRING, - number=6, - ) - - -class Nsx(proto.Message): - r"""Details about a NSX Manager appliance. - - Attributes: - internal_ip (str): - Internal IP address of the appliance. - version (str): - Version of the appliance. - state (google.cloud.vmwareengine_v1.types.Nsx.State): - Output only. The state of the appliance. - fqdn (str): - Fully qualified domain name of the appliance. - """ - - class State(proto.Enum): - r"""State of the appliance - - Values: - STATE_UNSPECIFIED (0): - Unspecified appliance state. This is the - default value. - ACTIVE (1): - The appliance is operational and can be used. - CREATING (2): - The appliance is being deployed. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - - internal_ip: str = proto.Field( - proto.STRING, - number=2, - ) - version: str = proto.Field( - proto.STRING, - number=4, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - fqdn: str = proto.Field( - proto.STRING, - number=6, - ) - - -class Vcenter(proto.Message): - r"""Details about a vCenter Server management appliance. - - Attributes: - internal_ip (str): - Internal IP address of the appliance. - version (str): - Version of the appliance. - state (google.cloud.vmwareengine_v1.types.Vcenter.State): - Output only. The state of the appliance. - fqdn (str): - Fully qualified domain name of the appliance. - """ - - class State(proto.Enum): - r"""State of the appliance - - Values: - STATE_UNSPECIFIED (0): - Unspecified appliance state. This is the - default value. - ACTIVE (1): - The appliance is operational and can be used. - CREATING (2): - The appliance is being deployed. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - - internal_ip: str = proto.Field( - proto.STRING, - number=2, - ) - version: str = proto.Field( - proto.STRING, - number=4, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - fqdn: str = proto.Field( - proto.STRING, - number=6, - ) - - -class NetworkPolicy(proto.Message): - r"""Represents a network policy resource. Network policies are - regional resources. You can use a network policy to enable or - disable internet access and external IP access. Network policies - are associated with a VMware Engine network, which might span - across regions. For a given region, a network policy applies to - all private clouds in the VMware Engine network associated with - the policy. - - Attributes: - name (str): - Output only. The resource name of this network policy. - Resource names are schemeless URIs that follow the - conventions in - https://cloud.google.com/apis/design/resource_names. 
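# Hcx, Nsx, and Vcenter (all moved to vmwareengine_resources.py in this diff)
# share the same shape, so the three management appliances of a private cloud
# can be inspected uniformly. The flattened name argument and the hcx/nsx/
# vcenter fields on PrivateCloud (defined later in this diff) are assumptions
# about the generated surface; the path is a placeholder.
from google.cloud import vmwareengine_v1

client = vmwareengine_v1.VmwareEngineClient()
cloud = client.get_private_cloud(
    name="projects/my-project/locations/us-central1-a/privateClouds/my-cloud"
)
for appliance in (cloud.hcx, cloud.nsx, cloud.vcenter):
    print(appliance.fqdn, appliance.internal_ip, appliance.version, appliance.state.name)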
For - example: - ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Creation time of this resource. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update time of this - resource. - internet_access (google.cloud.vmwareengine_v1.types.NetworkPolicy.NetworkService): - Network service that allows VMware workloads - to access the internet. - external_ip (google.cloud.vmwareengine_v1.types.NetworkPolicy.NetworkService): - Network service that allows External IP addresses to be - assigned to VMware workloads. This service can only be - enabled when ``internet_access`` is also enabled. - edge_services_cidr (str): - Required. IP address range in CIDR notation - used to create internet access and external IP - access. An RFC 1918 CIDR block, with a "/26" - prefix, is required. The range cannot overlap - with any prefixes either in the consumer VPC - network or in use by the private clouds attached - to that VPC network. - uid (str): - Output only. System-generated unique - identifier for the resource. - vmware_engine_network (str): - Optional. The relative resource name of the VMware Engine - network. Specify the name in the following form: - ``projects/{project}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` - where ``{project}`` can either be a project number or a - project ID. - description (str): - Optional. User-provided description for this - network policy. - vmware_engine_network_canonical (str): - Output only. The canonical name of the VMware Engine network - in the form: - ``projects/{project_number}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` - """ - - class NetworkService(proto.Message): - r"""Represents a network service that is managed by a ``NetworkPolicy`` - resource. A network service provides a way to control an aspect of - external access to VMware workloads. For example, whether the VMware - workloads in the private clouds governed by a network policy can - access or be accessed from the internet. - - Attributes: - enabled (bool): - True if the service is enabled; false - otherwise. - state (google.cloud.vmwareengine_v1.types.NetworkPolicy.NetworkService.State): - Output only. State of the service. New values - may be added to this enum when appropriate. - """ - - class State(proto.Enum): - r"""Enum State defines possible states of a network policy - controlled service. - - Values: - STATE_UNSPECIFIED (0): - Unspecified service state. This is the - default value. - UNPROVISIONED (1): - Service is not provisioned. - RECONCILING (2): - Service is in the process of being - provisioned/deprovisioned. - ACTIVE (3): - Service is active. 
- """ - STATE_UNSPECIFIED = 0 - UNPROVISIONED = 1 - RECONCILING = 2 - ACTIVE = 3 - - enabled: bool = proto.Field( - proto.BOOL, - number=1, - ) - state: "NetworkPolicy.NetworkService.State" = proto.Field( - proto.ENUM, - number=2, - enum="NetworkPolicy.NetworkService.State", - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - internet_access: NetworkService = proto.Field( - proto.MESSAGE, - number=6, - message=NetworkService, - ) - external_ip: NetworkService = proto.Field( - proto.MESSAGE, - number=7, - message=NetworkService, - ) - edge_services_cidr: str = proto.Field( - proto.STRING, - number=9, - ) - uid: str = proto.Field( - proto.STRING, - number=10, - ) - vmware_engine_network: str = proto.Field( - proto.STRING, - number=12, - ) - description: str = proto.Field( - proto.STRING, - number=13, - ) - vmware_engine_network_canonical: str = proto.Field( - proto.STRING, - number=14, - ) - - class ListNetworkPoliciesRequest(proto.Message): r"""Request message for [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] @@ -2203,10 +1323,12 @@ class ListNetworkPoliciesResponse(proto.Message): def raw_page(self): return self - network_policies: MutableSequence["NetworkPolicy"] = proto.RepeatedField( + network_policies: MutableSequence[ + vmwareengine_resources.NetworkPolicy + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="NetworkPolicy", + message=vmwareengine_resources.NetworkPolicy, ) next_page_token: str = proto.Field( proto.STRING, @@ -2274,10 +1396,10 @@ class UpdateNetworkPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - network_policy: "NetworkPolicy" = proto.Field( + network_policy: vmwareengine_resources.NetworkPolicy = proto.Field( proto.MESSAGE, number=1, - message="NetworkPolicy", + message=vmwareengine_resources.NetworkPolicy, ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2348,10 +1470,10 @@ class CreateNetworkPolicyRequest(proto.Message): proto.STRING, number=2, ) - network_policy: "NetworkPolicy" = proto.Field( + network_policy: vmwareengine_resources.NetworkPolicy = proto.Field( proto.MESSAGE, number=3, - message="NetworkPolicy", + message=vmwareengine_resources.NetworkPolicy, ) request_id: str = proto.Field( proto.STRING, @@ -2403,179 +1525,6 @@ class DeleteNetworkPolicyRequest(proto.Message): ) -class VmwareEngineNetwork(proto.Message): - r"""VMware Engine network resource that provides connectivity for - VMware Engine private clouds. - - Attributes: - name (str): - Output only. The resource name of the VMware Engine network. - Resource names are schemeless URIs that follow the - conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Creation time of this resource. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update time of this - resource. - description (str): - User-provided description for this VMware - Engine network. - vpc_networks (MutableSequence[google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.VpcNetwork]): - Output only. 
VMware Engine service VPC - networks that provide connectivity from a - private cloud to customer projects, the - internet, and other Google Cloud services. - state (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.State): - Output only. State of the VMware Engine - network. - type_ (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.Type): - Required. VMware Engine network type. - uid (str): - Output only. System-generated unique - identifier for the resource. - etag (str): - Checksum that may be sent on update and - delete requests to ensure that the user-provided - value is up to date before the server processes - a request. The server computes checksums based - on the value of other fields in the request. - """ - - class State(proto.Enum): - r"""Enum State defines possible states of VMware Engine network. - - Values: - STATE_UNSPECIFIED (0): - The default value. This value is used if the - state is omitted. - CREATING (1): - The VMware Engine network is being created. - ACTIVE (2): - The VMware Engine network is ready. - UPDATING (3): - The VMware Engine network is being updated. - DELETING (4): - The VMware Engine network is being deleted. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - UPDATING = 3 - DELETING = 4 - - class Type(proto.Enum): - r"""Enum Type defines possible types of VMware Engine network. - - Values: - TYPE_UNSPECIFIED (0): - The default value. This value should never be - used. - LEGACY (1): - Network type used by private clouds created in projects - without a network of type ``STANDARD``. This network type is - no longer used for new VMware Engine private cloud - deployments. - """ - TYPE_UNSPECIFIED = 0 - LEGACY = 1 - - class VpcNetwork(proto.Message): - r"""Represents a VMware Engine VPC network that is managed by a - VMware Engine network resource. - - Attributes: - type_ (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.VpcNetwork.Type): - Output only. Type of VPC network (INTRANET, INTERNET, or - GOOGLE_CLOUD) - network (str): - Output only. The relative resource name of the service VPC - network this VMware Engine network is attached to. For - example: ``projects/123123/global/networks/my-network`` - """ - - class Type(proto.Enum): - r"""Enum Type defines possible types of a VMware Engine network - controlled service. - - Values: - TYPE_UNSPECIFIED (0): - The default value. This value should never be - used. - INTRANET (1): - VPC network that will be peered with a - consumer VPC network or the intranet VPC of - another VMware Engine network. Access a private - cloud through Compute Engine VMs on a peered VPC - network or an on-premises resource connected to - a peered consumer VPC network. - INTERNET (2): - VPC network used for internet access to and - from a private cloud. - GOOGLE_CLOUD (3): - VPC network used for access to Google Cloud - services like Cloud Storage. 
- """ - TYPE_UNSPECIFIED = 0 - INTRANET = 1 - INTERNET = 2 - GOOGLE_CLOUD = 3 - - type_: "VmwareEngineNetwork.VpcNetwork.Type" = proto.Field( - proto.ENUM, - number=1, - enum="VmwareEngineNetwork.VpcNetwork.Type", - ) - network: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - vpc_networks: MutableSequence[VpcNetwork] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=VpcNetwork, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - type_: Type = proto.Field( - proto.ENUM, - number=8, - enum=Type, - ) - uid: str = proto.Field( - proto.STRING, - number=9, - ) - etag: str = proto.Field( - proto.STRING, - number=10, - ) - - class CreateVmwareEngineNetworkRequest(proto.Message): r"""Request message for [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] @@ -2640,10 +1589,10 @@ class CreateVmwareEngineNetworkRequest(proto.Message): proto.STRING, number=2, ) - vmware_engine_network: "VmwareEngineNetwork" = proto.Field( + vmware_engine_network: vmwareengine_resources.VmwareEngineNetwork = proto.Field( proto.MESSAGE, number=3, - message="VmwareEngineNetwork", + message=vmwareengine_resources.VmwareEngineNetwork, ) request_id: str = proto.Field( proto.STRING, @@ -2688,10 +1637,10 @@ class UpdateVmwareEngineNetworkRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - vmware_engine_network: "VmwareEngineNetwork" = proto.Field( + vmware_engine_network: vmwareengine_resources.VmwareEngineNetwork = proto.Field( proto.MESSAGE, number=1, - message="VmwareEngineNetwork", + message=vmwareengine_resources.VmwareEngineNetwork, ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2880,11 +1829,11 @@ def raw_page(self): return self vmware_engine_networks: MutableSequence[ - "VmwareEngineNetwork" + vmwareengine_resources.VmwareEngineNetwork ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="VmwareEngineNetwork", + message=vmwareengine_resources.VmwareEngineNetwork, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine_resources.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine_resources.py new file mode 100644 index 000000000000..cda63361162b --- /dev/null +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine_resources.py @@ -0,0 +1,1091 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.vmwareengine.v1", + manifest={ + "NetworkConfig", + "NodeTypeConfig", + "PrivateCloud", + "Cluster", + "Subnet", + "NodeType", + "Credentials", + "HcxActivationKey", + "Hcx", + "Nsx", + "Vcenter", + "NetworkPolicy", + "VmwareEngineNetwork", + }, +) + + +class NetworkConfig(proto.Message): + r"""Network configuration in the consumer project + with which the peering has to be done. + + Attributes: + management_cidr (str): + Required. Management CIDR used by VMware + management appliances. + vmware_engine_network (str): + Optional. The relative resource name of the VMware Engine + network attached to the private cloud. Specify the name in + the following form: + ``projects/{project}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` + where ``{project}`` can either be a project number or a + project ID. + vmware_engine_network_canonical (str): + Output only. The canonical name of the VMware Engine network + in the form: + ``projects/{project_number}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` + management_ip_address_layout_version (int): + Output only. The IP address layout version of the management + IP address range. Possible versions include: + + - ``managementIpAddressLayoutVersion=1``: Indicates the + legacy IP address layout used by some existing private + clouds. This is no longer supported for new private + clouds as it does not support all features. + - ``managementIpAddressLayoutVersion=2``: Indicates the + latest IP address layout used by all newly created + private clouds. This version supports all current + features. + """ + + management_cidr: str = proto.Field( + proto.STRING, + number=4, + ) + vmware_engine_network: str = proto.Field( + proto.STRING, + number=5, + ) + vmware_engine_network_canonical: str = proto.Field( + proto.STRING, + number=6, + ) + management_ip_address_layout_version: int = proto.Field( + proto.INT32, + number=8, + ) + + +class NodeTypeConfig(proto.Message): + r"""Information about the type and number of nodes associated + with the cluster. + + Attributes: + node_count (int): + Required. The number of nodes of this type in + the cluster + custom_core_count (int): + Optional. Customized number of cores available to each node + of the type. This number must always be one of + ``nodeType.availableCustomCoreCounts``. If zero is provided + max value from ``nodeType.availableCustomCoreCounts`` will + be used. + """ + + node_count: int = proto.Field( + proto.INT32, + number=1, + ) + custom_core_count: int = proto.Field( + proto.INT32, + number=2, + ) + + +class PrivateCloud(proto.Message): + r"""Represents a private cloud resource. Private clouds are zonal + resources. + + Attributes: + name (str): + Output only. The resource name of this private cloud. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
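# NetworkConfig, introduced above, is the peering configuration a private
# cloud is created with: management_cidr is required, vmware_engine_network
# is optional, and the canonical network name plus the IP address layout
# version are output only. A minimal construction sketch with placeholder
# values:
from google.cloud import vmwareengine_v1

network_config = vmwareengine_v1.NetworkConfig(
    management_cidr="192.168.1.0/24",
    vmware_engine_network=(
        "projects/my-project/locations/global/vmwareEngineNetworks/my-network"
    ),
)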
Time when the resource was + scheduled for deletion. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the resource will be + irreversibly deleted. + state (google.cloud.vmwareengine_v1.types.PrivateCloud.State): + Output only. State of the resource. New + values may be added to this enum when + appropriate. + network_config (google.cloud.vmwareengine_v1.types.NetworkConfig): + Required. Network configuration of the + private cloud. + management_cluster (google.cloud.vmwareengine_v1.types.PrivateCloud.ManagementCluster): + Required. Input only. The management cluster for this + private cloud. This field is required during creation of the + private cloud to provide details for the default cluster. + + The following fields can't be changed after private cloud + creation: ``ManagementCluster.clusterId``, + ``ManagementCluster.nodeTypeId``. + description (str): + User-provided description for this private + cloud. + hcx (google.cloud.vmwareengine_v1.types.Hcx): + Output only. HCX appliance. + nsx (google.cloud.vmwareengine_v1.types.Nsx): + Output only. NSX appliance. + vcenter (google.cloud.vmwareengine_v1.types.Vcenter): + Output only. Vcenter appliance. + uid (str): + Output only. System-generated unique + identifier for the resource. + """ + + class State(proto.Enum): + r"""Enum State defines possible states of private clouds. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value should never be + used. + ACTIVE (1): + The private cloud is ready. + CREATING (2): + The private cloud is being created. + UPDATING (3): + The private cloud is being updated. + FAILED (5): + The private cloud is in failed state. + DELETED (6): + The private cloud is scheduled for deletion. + The deletion process can be cancelled by using + the corresponding undelete method. + PURGING (7): + The private cloud is irreversibly deleted and + is being removed from the system. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + UPDATING = 3 + FAILED = 5 + DELETED = 6 + PURGING = 7 + + class ManagementCluster(proto.Message): + r"""Management cluster configuration. + + Attributes: + cluster_id (str): + Required. The user-provided identifier of the new + ``Cluster``. The identifier must meet the following + requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + node_type_configs (MutableMapping[str, google.cloud.vmwareengine_v1.types.NodeTypeConfig]): + Required. The map of cluster node types in this cluster, + where the key is canonical identifier of the node type + (corresponds to the ``NodeType``). 
+ """ + + cluster_id: str = proto.Field( + proto.STRING, + number=1, + ) + node_type_configs: MutableMapping[str, "NodeTypeConfig"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=7, + message="NodeTypeConfig", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + network_config: "NetworkConfig" = proto.Field( + proto.MESSAGE, + number=9, + message="NetworkConfig", + ) + management_cluster: ManagementCluster = proto.Field( + proto.MESSAGE, + number=10, + message=ManagementCluster, + ) + description: str = proto.Field( + proto.STRING, + number=11, + ) + hcx: "Hcx" = proto.Field( + proto.MESSAGE, + number=17, + message="Hcx", + ) + nsx: "Nsx" = proto.Field( + proto.MESSAGE, + number=18, + message="Nsx", + ) + vcenter: "Vcenter" = proto.Field( + proto.MESSAGE, + number=19, + message="Vcenter", + ) + uid: str = proto.Field( + proto.STRING, + number=20, + ) + + +class Cluster(proto.Message): + r"""A cluster in a private cloud. + + Attributes: + name (str): + Output only. The resource name of this cluster. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/clusters/my-cluster`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + state (google.cloud.vmwareengine_v1.types.Cluster.State): + Output only. State of the resource. + management (bool): + Output only. True if the cluster is a + management cluster; false otherwise. There can + only be one management cluster in a private + cloud and it has to be the first one. + uid (str): + Output only. System-generated unique + identifier for the resource. + node_type_configs (MutableMapping[str, google.cloud.vmwareengine_v1.types.NodeTypeConfig]): + Required. The map of cluster node types in this cluster, + where the key is canonical identifier of the node type + (corresponds to the ``NodeType``). + """ + + class State(proto.Enum): + r"""Enum State defines possible states of private cloud clusters. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value should never be + used. + ACTIVE (1): + The Cluster is operational and can be used by + the user. + CREATING (2): + The Cluster is being deployed. + UPDATING (3): + Adding or removing of a node to the cluster, + any other cluster specific updates. + DELETING (4): + The Cluster is being deleted. + REPAIRING (5): + The Cluster is undergoing maintenance, for + example: a failed node is getting replaced. 
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + UPDATING = 3 + DELETING = 4 + REPAIRING = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + management: bool = proto.Field( + proto.BOOL, + number=7, + ) + uid: str = proto.Field( + proto.STRING, + number=14, + ) + node_type_configs: MutableMapping[str, "NodeTypeConfig"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=16, + message="NodeTypeConfig", + ) + + +class Subnet(proto.Message): + r"""Subnet in a private cloud. Either ``management`` subnets (such as + vMotion) that are read-only, or ``userDefined``, which can also be + updated. + + Attributes: + name (str): + Output only. The resource name of this subnet. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` + ip_cidr_range (str): + The IP address range of the subnet in CIDR + format '10.0.0.0/24'. + gateway_ip (str): + The IP address of the gateway of this subnet. + Must fall within the IP prefix defined above. + type_ (str): + Output only. The type of the subnet. For + example "management" or "userDefined". + state (google.cloud.vmwareengine_v1.types.Subnet.State): + Output only. The state of the resource. + """ + + class State(proto.Enum): + r"""Defines possible states of subnets. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value should never be + used. + ACTIVE (1): + The subnet is ready. + CREATING (2): + The subnet is being created. + UPDATING (3): + The subnet is being updated. + DELETING (4): + The subnet is being deleted. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + UPDATING = 3 + DELETING = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=7, + ) + gateway_ip: str = proto.Field( + proto.STRING, + number=8, + ) + type_: str = proto.Field( + proto.STRING, + number=11, + ) + state: State = proto.Field( + proto.ENUM, + number=13, + enum=State, + ) + + +class NodeType(proto.Message): + r"""Describes node type. + + Attributes: + name (str): + Output only. The resource name of this node type. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` + node_type_id (str): + Output only. The canonical identifier of the node type + (corresponds to the ``NodeType``). For example: standard-72. + display_name (str): + Output only. The friendly name for this node + type. For example: ve1-standard-72 + virtual_cpu_count (int): + Output only. The total number of virtual CPUs + in a single node. + total_core_count (int): + Output only. The total number of CPU cores in + a single node. + memory_gb (int): + Output only. The amount of physical memory + available, defined in GB. + disk_size_gb (int): + Output only. The amount of storage available, + defined in GB. + available_custom_core_counts (MutableSequence[int]): + Output only. List of possible values of + custom core count. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + node_type_id: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + virtual_cpu_count: int = proto.Field( + proto.INT32, + number=4, + ) + total_core_count: int = proto.Field( + proto.INT32, + number=5, + ) + memory_gb: int = proto.Field( + proto.INT32, + number=7, + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=8, + ) + available_custom_core_counts: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=11, + ) + + +class Credentials(proto.Message): + r"""Credentials for a private cloud. + + Attributes: + username (str): + Initial username. + password (str): + Initial password. + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + + +class HcxActivationKey(proto.Message): + r"""HCX activation key. A default key is created during private cloud + provisioning, but this behavior is subject to change and you should + always verify active keys. Use + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + to retrieve existing keys and + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + to create new ones. + + Attributes: + name (str): + Output only. The resource name of this HcxActivationKey. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of HCX activation + key. + state (google.cloud.vmwareengine_v1.types.HcxActivationKey.State): + Output only. State of HCX activation key. + activation_key (str): + Output only. HCX activation key. + uid (str): + Output only. System-generated unique + identifier for the resource. + """ + + class State(proto.Enum): + r"""State of HCX activation key + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + AVAILABLE (1): + State of a newly generated activation key. + CONSUMED (2): + State of key when it has been used to + activate HCX appliance. + CREATING (3): + State of key when it is being created. + """ + STATE_UNSPECIFIED = 0 + AVAILABLE = 1 + CONSUMED = 2 + CREATING = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + activation_key: str = proto.Field( + proto.STRING, + number=4, + ) + uid: str = proto.Field( + proto.STRING, + number=5, + ) + + +class Hcx(proto.Message): + r"""Details about a HCX Cloud Manager appliance. + + Attributes: + internal_ip (str): + Internal IP address of the appliance. + version (str): + Version of the appliance. + state (google.cloud.vmwareengine_v1.types.Hcx.State): + Output only. The state of the appliance. + fqdn (str): + Fully qualified domain name of the appliance. + """ + + class State(proto.Enum): + r"""State of the appliance + + Values: + STATE_UNSPECIFIED (0): + Unspecified appliance state. This is the + default value. + ACTIVE (1): + The appliance is operational and can be used. + CREATING (2): + The appliance is being deployed. 
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + + internal_ip: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + fqdn: str = proto.Field( + proto.STRING, + number=6, + ) + + +class Nsx(proto.Message): + r"""Details about a NSX Manager appliance. + + Attributes: + internal_ip (str): + Internal IP address of the appliance. + version (str): + Version of the appliance. + state (google.cloud.vmwareengine_v1.types.Nsx.State): + Output only. The state of the appliance. + fqdn (str): + Fully qualified domain name of the appliance. + """ + + class State(proto.Enum): + r"""State of the appliance + + Values: + STATE_UNSPECIFIED (0): + Unspecified appliance state. This is the + default value. + ACTIVE (1): + The appliance is operational and can be used. + CREATING (2): + The appliance is being deployed. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + + internal_ip: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + fqdn: str = proto.Field( + proto.STRING, + number=6, + ) + + +class Vcenter(proto.Message): + r"""Details about a vCenter Server management appliance. + + Attributes: + internal_ip (str): + Internal IP address of the appliance. + version (str): + Version of the appliance. + state (google.cloud.vmwareengine_v1.types.Vcenter.State): + Output only. The state of the appliance. + fqdn (str): + Fully qualified domain name of the appliance. + """ + + class State(proto.Enum): + r"""State of the appliance + + Values: + STATE_UNSPECIFIED (0): + Unspecified appliance state. This is the + default value. + ACTIVE (1): + The appliance is operational and can be used. + CREATING (2): + The appliance is being deployed. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + + internal_ip: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + fqdn: str = proto.Field( + proto.STRING, + number=6, + ) + + +class NetworkPolicy(proto.Message): + r"""Represents a network policy resource. Network policies are + regional resources. You can use a network policy to enable or + disable internet access and external IP access. Network policies + are associated with a VMware Engine network, which might span + across regions. For a given region, a network policy applies to + all private clouds in the VMware Engine network associated with + the policy. + + Attributes: + name (str): + Output only. The resource name of this network policy. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + internet_access (google.cloud.vmwareengine_v1.types.NetworkPolicy.NetworkService): + Network service that allows VMware workloads + to access the internet. + external_ip (google.cloud.vmwareengine_v1.types.NetworkPolicy.NetworkService): + Network service that allows External IP addresses to be + assigned to VMware workloads. 
This service can only be + enabled when ``internet_access`` is also enabled. + edge_services_cidr (str): + Required. IP address range in CIDR notation + used to create internet access and external IP + access. An RFC 1918 CIDR block, with a "/26" + prefix, is required. The range cannot overlap + with any prefixes either in the consumer VPC + network or in use by the private clouds attached + to that VPC network. + uid (str): + Output only. System-generated unique + identifier for the resource. + vmware_engine_network (str): + Optional. The relative resource name of the VMware Engine + network. Specify the name in the following form: + ``projects/{project}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` + where ``{project}`` can either be a project number or a + project ID. + description (str): + Optional. User-provided description for this + network policy. + vmware_engine_network_canonical (str): + Output only. The canonical name of the VMware Engine network + in the form: + ``projects/{project_number}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` + """ + + class NetworkService(proto.Message): + r"""Represents a network service that is managed by a ``NetworkPolicy`` + resource. A network service provides a way to control an aspect of + external access to VMware workloads. For example, whether the VMware + workloads in the private clouds governed by a network policy can + access or be accessed from the internet. + + Attributes: + enabled (bool): + True if the service is enabled; false + otherwise. + state (google.cloud.vmwareengine_v1.types.NetworkPolicy.NetworkService.State): + Output only. State of the service. New values + may be added to this enum when appropriate. + """ + + class State(proto.Enum): + r"""Enum State defines possible states of a network policy + controlled service. + + Values: + STATE_UNSPECIFIED (0): + Unspecified service state. This is the + default value. + UNPROVISIONED (1): + Service is not provisioned. + RECONCILING (2): + Service is in the process of being + provisioned/deprovisioned. + ACTIVE (3): + Service is active. + """ + STATE_UNSPECIFIED = 0 + UNPROVISIONED = 1 + RECONCILING = 2 + ACTIVE = 3 + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + state: "NetworkPolicy.NetworkService.State" = proto.Field( + proto.ENUM, + number=2, + enum="NetworkPolicy.NetworkService.State", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + internet_access: NetworkService = proto.Field( + proto.MESSAGE, + number=6, + message=NetworkService, + ) + external_ip: NetworkService = proto.Field( + proto.MESSAGE, + number=7, + message=NetworkService, + ) + edge_services_cidr: str = proto.Field( + proto.STRING, + number=9, + ) + uid: str = proto.Field( + proto.STRING, + number=10, + ) + vmware_engine_network: str = proto.Field( + proto.STRING, + number=12, + ) + description: str = proto.Field( + proto.STRING, + number=13, + ) + vmware_engine_network_canonical: str = proto.Field( + proto.STRING, + number=14, + ) + + +class VmwareEngineNetwork(proto.Message): + r"""VMware Engine network resource that provides connectivity for + VMware Engine private clouds. + + Attributes: + name (str): + Output only. The resource name of the VMware Engine network. 
+ Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + description (str): + User-provided description for this VMware + Engine network. + vpc_networks (MutableSequence[google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.VpcNetwork]): + Output only. VMware Engine service VPC + networks that provide connectivity from a + private cloud to customer projects, the + internet, and other Google Cloud services. + state (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.State): + Output only. State of the VMware Engine + network. + type_ (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.Type): + Required. VMware Engine network type. + uid (str): + Output only. System-generated unique + identifier for the resource. + etag (str): + Checksum that may be sent on update and + delete requests to ensure that the user-provided + value is up to date before the server processes + a request. The server computes checksums based + on the value of other fields in the request. + """ + + class State(proto.Enum): + r"""Enum State defines possible states of VMware Engine network. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + CREATING (1): + The VMware Engine network is being created. + ACTIVE (2): + The VMware Engine network is ready. + UPDATING (3): + The VMware Engine network is being updated. + DELETING (4): + The VMware Engine network is being deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + UPDATING = 3 + DELETING = 4 + + class Type(proto.Enum): + r"""Enum Type defines possible types of VMware Engine network. + + Values: + TYPE_UNSPECIFIED (0): + The default value. This value should never be + used. + LEGACY (1): + Network type used by private clouds created in projects + without a network of type ``STANDARD``. This network type is + no longer used for new VMware Engine private cloud + deployments. + """ + TYPE_UNSPECIFIED = 0 + LEGACY = 1 + + class VpcNetwork(proto.Message): + r"""Represents a VMware Engine VPC network that is managed by a + VMware Engine network resource. + + Attributes: + type_ (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork.VpcNetwork.Type): + Output only. Type of VPC network (INTRANET, INTERNET, or + GOOGLE_CLOUD) + network (str): + Output only. The relative resource name of the service VPC + network this VMware Engine network is attached to. For + example: ``projects/123123/global/networks/my-network`` + """ + + class Type(proto.Enum): + r"""Enum Type defines possible types of a VMware Engine network + controlled service. + + Values: + TYPE_UNSPECIFIED (0): + The default value. This value should never be + used. + INTRANET (1): + VPC network that will be peered with a + consumer VPC network or the intranet VPC of + another VMware Engine network. Access a private + cloud through Compute Engine VMs on a peered VPC + network or an on-premises resource connected to + a peered consumer VPC network. + INTERNET (2): + VPC network used for internet access to and + from a private cloud. + GOOGLE_CLOUD (3): + VPC network used for access to Google Cloud + services like Cloud Storage. 
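# A sketch of inspecting the attached service VPC networks: vpc_networks is
# output only, and each entry's type_ is INTRANET, INTERNET, or GOOGLE_CLOUD
# per the VpcNetwork.Type enum above. The flattened name argument is an
# assumption and the path is a placeholder.
from google.cloud import vmwareengine_v1

client = vmwareengine_v1.VmwareEngineClient()
network = client.get_vmware_engine_network(
    name="projects/my-project/locations/global/vmwareEngineNetworks/my-network"
)
for vpc in network.vpc_networks:
    print(vpc.type_.name, vpc.network)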
+ """ + TYPE_UNSPECIFIED = 0 + INTRANET = 1 + INTERNET = 2 + GOOGLE_CLOUD = 3 + + type_: "VmwareEngineNetwork.VpcNetwork.Type" = proto.Field( + proto.ENUM, + number=1, + enum="VmwareEngineNetwork.VpcNetwork.Type", + ) + network: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + vpc_networks: MutableSequence[VpcNetwork] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=VpcNetwork, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + type_: Type = proto.Field( + proto.ENUM, + number=8, + enum=Type, + ) + uid: str = proto.Field( + proto.STRING, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-vmwareengine/noxfile.py b/packages/google-cloud-vmwareengine/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-cloud-vmwareengine/noxfile.py +++ b/packages/google-cloud-vmwareengine/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json b/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json index 8e58f9e41a5c..789b39d64698 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vmwareengine", - "version": "0.2.2" + "version": "1.0.1" }, "snippets": [ { @@ -599,12 +599,12 @@ "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateCloud_async", "segments": [ { - "end": 60, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 61, "start": 27, "type": "SHORT" }, @@ -614,18 +614,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -687,12 +687,12 @@ "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateCloud_sync", "segments": [ { - "end": 60, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 61, "start": 27, "type": "SHORT" }, @@ -702,18 +702,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -4829,12 +4829,12 @@ "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdatePrivateCloud_async", "segments": [ { - "end": 58, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 59, "start": 27, "type": "SHORT" }, @@ -4844,18 +4844,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 49, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -4913,12 +4913,12 @@ "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdatePrivateCloud_sync", "segments": [ { - "end": 58, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 59, "start": 27, "type": "SHORT" }, @@ -4928,18 +4928,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 49, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py index a7f45836b0ae..44d5c3bac8eb 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py +++ 
b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py @@ -41,6 +41,7 @@ async def sample_create_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.CreatePrivateCloudRequest( parent="parent_value", diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py index d57b2786c1a0..a25c75154bfe 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py @@ -41,6 +41,7 @@ def sample_create_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.CreatePrivateCloudRequest( parent="parent_value", diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_async.py index 3c174331d0e5..aa26c2b263a8 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_async.py +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_async.py @@ -41,6 +41,7 @@ async def sample_update_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.UpdatePrivateCloudRequest( private_cloud=private_cloud, diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_sync.py index 9aa6cbbd9bb3..38c7a659c9ef 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_sync.py +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_private_cloud_sync.py @@ -41,6 +41,7 @@ def sample_update_private_cloud(): # Initialize request argument(s) private_cloud = vmwareengine_v1.PrivateCloud() private_cloud.network_config.management_cidr = "management_cidr_value" + private_cloud.management_cluster.cluster_id = "cluster_id_value" request = vmwareengine_v1.UpdatePrivateCloudRequest( private_cloud=private_cloud, diff --git a/packages/google-cloud-vmwareengine/setup.py b/packages/google-cloud-vmwareengine/setup.py index c56f945af166..e2fec52a7c33 100644 --- a/packages/google-cloud-vmwareengine/setup.py +++ b/packages/google-cloud-vmwareengine/setup.py 
@@ -42,7 +42,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python" @@ -58,9 +58,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, diff --git a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py index 1e84bcee4ce8..ff1eacc2e042 100644 --- a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py +++ b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py @@ -65,7 +65,7 @@ pagers, transports, ) -from google.cloud.vmwareengine_v1.types import vmwareengine +from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources def client_cert_source_callback(): @@ -970,9 +970,9 @@ def test_list_private_clouds_pager(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="abc", ), @@ -982,14 +982,14 @@ def test_list_private_clouds_pager(transport_name: str = "grpc"): ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="ghi", ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], ), RuntimeError, @@ -1005,7 +1005,7 @@ def test_list_private_clouds_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.PrivateCloud) for i in results) + assert all(isinstance(i, vmwareengine_resources.PrivateCloud) for i in results) def test_list_private_clouds_pages(transport_name: str = "grpc"): @@ -1022,9 +1022,9 @@ def test_list_private_clouds_pages(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="abc", ), @@ -1034,14 +1034,14 @@ def test_list_private_clouds_pages(transport_name: str = "grpc"): ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="ghi", ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], ), RuntimeError, @@ -1067,9 +1067,9 @@ async def test_list_private_clouds_async_pager(): call.side_effect = ( vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - 
vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="abc", ), @@ -1079,14 +1079,14 @@ async def test_list_private_clouds_async_pager(): ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="ghi", ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], ), RuntimeError, @@ -1100,7 +1100,9 @@ async def test_list_private_clouds_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine.PrivateCloud) for i in responses) + assert all( + isinstance(i, vmwareengine_resources.PrivateCloud) for i in responses + ) @pytest.mark.asyncio @@ -1119,9 +1121,9 @@ async def test_list_private_clouds_async_pages(): call.side_effect = ( vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="abc", ), @@ -1131,14 +1133,14 @@ async def test_list_private_clouds_async_pages(): ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="ghi", ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], ), RuntimeError, @@ -1174,9 +1176,9 @@ def test_get_private_cloud(request_type, transport: str = "grpc"): type(client.transport.get_private_cloud), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.PrivateCloud( + call.return_value = vmwareengine_resources.PrivateCloud( name="name_value", - state=vmwareengine.PrivateCloud.State.ACTIVE, + state=vmwareengine_resources.PrivateCloud.State.ACTIVE, description="description_value", uid="uid_value", ) @@ -1188,9 +1190,9 @@ def test_get_private_cloud(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.GetPrivateCloudRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.PrivateCloud) + assert isinstance(response, vmwareengine_resources.PrivateCloud) assert response.name == "name_value" - assert response.state == vmwareengine.PrivateCloud.State.ACTIVE + assert response.state == vmwareengine_resources.PrivateCloud.State.ACTIVE assert response.description == "description_value" assert response.uid == "uid_value" @@ -1232,9 +1234,9 @@ async def test_get_private_cloud_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.PrivateCloud( + vmwareengine_resources.PrivateCloud( name="name_value", - state=vmwareengine.PrivateCloud.State.ACTIVE, + state=vmwareengine_resources.PrivateCloud.State.ACTIVE, description="description_value", uid="uid_value", ) @@ -1247,9 +1249,9 @@ async def test_get_private_cloud_async( assert args[0] == vmwareengine.GetPrivateCloudRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine.PrivateCloud) + assert isinstance(response, vmwareengine_resources.PrivateCloud) assert response.name == "name_value" - assert response.state == vmwareengine.PrivateCloud.State.ACTIVE + assert response.state == vmwareengine_resources.PrivateCloud.State.ACTIVE assert response.description == "description_value" assert response.uid == "uid_value" @@ -1274,7 +1276,7 @@ def test_get_private_cloud_field_headers(): with mock.patch.object( type(client.transport.get_private_cloud), "__call__" ) as call: - call.return_value = vmwareengine.PrivateCloud() + call.return_value = vmwareengine_resources.PrivateCloud() client.get_private_cloud(request) # Establish that the underlying gRPC stub method was called. @@ -1307,7 +1309,7 @@ async def test_get_private_cloud_field_headers_async(): type(client.transport.get_private_cloud), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.PrivateCloud() + vmwareengine_resources.PrivateCloud() ) await client.get_private_cloud(request) @@ -1334,7 +1336,7 @@ def test_get_private_cloud_flattened(): type(client.transport.get_private_cloud), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.PrivateCloud() + call.return_value = vmwareengine_resources.PrivateCloud() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_private_cloud( @@ -1375,10 +1377,10 @@ async def test_get_private_cloud_flattened_async(): type(client.transport.get_private_cloud), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.PrivateCloud() + call.return_value = vmwareengine_resources.PrivateCloud() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.PrivateCloud() + vmwareengine_resources.PrivateCloud() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -1579,7 +1581,7 @@ def test_create_private_cloud_flattened(): # using the keyword arguments to the method. client.create_private_cloud( parent="parent_value", - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), private_cloud_id="private_cloud_id_value", ) @@ -1591,7 +1593,7 @@ def test_create_private_cloud_flattened(): mock_val = "parent_value" assert arg == mock_val arg = args[0].private_cloud - mock_val = vmwareengine.PrivateCloud(name="name_value") + mock_val = vmwareengine_resources.PrivateCloud(name="name_value") assert arg == mock_val arg = args[0].private_cloud_id mock_val = "private_cloud_id_value" @@ -1609,7 +1611,7 @@ def test_create_private_cloud_flattened_error(): client.create_private_cloud( vmwareengine.CreatePrivateCloudRequest(), parent="parent_value", - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), private_cloud_id="private_cloud_id_value", ) @@ -1634,7 +1636,7 @@ async def test_create_private_cloud_flattened_async(): # using the keyword arguments to the method. 
response = await client.create_private_cloud( parent="parent_value", - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), private_cloud_id="private_cloud_id_value", ) @@ -1646,7 +1648,7 @@ async def test_create_private_cloud_flattened_async(): mock_val = "parent_value" assert arg == mock_val arg = args[0].private_cloud - mock_val = vmwareengine.PrivateCloud(name="name_value") + mock_val = vmwareengine_resources.PrivateCloud(name="name_value") assert arg == mock_val arg = args[0].private_cloud_id mock_val = "private_cloud_id_value" @@ -1665,7 +1667,7 @@ async def test_create_private_cloud_flattened_error_async(): await client.create_private_cloud( vmwareengine.CreatePrivateCloudRequest(), parent="parent_value", - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), private_cloud_id="private_cloud_id_value", ) @@ -1838,7 +1840,7 @@ def test_update_private_cloud_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_private_cloud( - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1847,7 +1849,7 @@ def test_update_private_cloud_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].private_cloud - mock_val = vmwareengine.PrivateCloud(name="name_value") + mock_val = vmwareengine_resources.PrivateCloud(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -1864,7 +1866,7 @@ def test_update_private_cloud_flattened_error(): with pytest.raises(ValueError): client.update_private_cloud( vmwareengine.UpdatePrivateCloudRequest(), - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1888,7 +1890,7 @@ async def test_update_private_cloud_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_private_cloud( - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1897,7 +1899,7 @@ async def test_update_private_cloud_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].private_cloud - mock_val = vmwareengine.PrivateCloud(name="name_value") + mock_val = vmwareengine_resources.PrivateCloud(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -1915,7 +1917,7 @@ async def test_update_private_cloud_flattened_error_async(): with pytest.raises(ValueError): await client.update_private_cloud( vmwareengine.UpdatePrivateCloudRequest(), - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2649,9 +2651,9 @@ def test_list_clusters_pager(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="abc", ), @@ -2661,14 +2663,14 @@ def test_list_clusters_pager(transport_name: str = "grpc"): ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="ghi", ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], ), RuntimeError, @@ -2684,7 +2686,7 @@ def test_list_clusters_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.Cluster) for i in results) + assert all(isinstance(i, vmwareengine_resources.Cluster) for i in results) def test_list_clusters_pages(transport_name: str = "grpc"): @@ -2699,9 +2701,9 @@ def test_list_clusters_pages(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="abc", ), @@ -2711,14 +2713,14 @@ def test_list_clusters_pages(transport_name: str = "grpc"): ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="ghi", ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], ), RuntimeError, @@ -2742,9 +2744,9 @@ async def test_list_clusters_async_pager(): call.side_effect = ( vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="abc", ), @@ -2754,14 +2756,14 @@ async def test_list_clusters_async_pager(): ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="ghi", ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - 
vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], ), RuntimeError, @@ -2775,7 +2777,7 @@ async def test_list_clusters_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine.Cluster) for i in responses) + assert all(isinstance(i, vmwareengine_resources.Cluster) for i in responses) @pytest.mark.asyncio @@ -2792,9 +2794,9 @@ async def test_list_clusters_async_pages(): call.side_effect = ( vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="abc", ), @@ -2804,14 +2806,14 @@ async def test_list_clusters_async_pages(): ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="ghi", ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], ), RuntimeError, @@ -2845,9 +2847,9 @@ def test_get_cluster(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Cluster( + call.return_value = vmwareengine_resources.Cluster( name="name_value", - state=vmwareengine.Cluster.State.ACTIVE, + state=vmwareengine_resources.Cluster.State.ACTIVE, management=True, uid="uid_value", ) @@ -2859,9 +2861,9 @@ def test_get_cluster(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.GetClusterRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Cluster) + assert isinstance(response, vmwareengine_resources.Cluster) assert response.name == "name_value" - assert response.state == vmwareengine.Cluster.State.ACTIVE + assert response.state == vmwareengine_resources.Cluster.State.ACTIVE assert response.management is True assert response.uid == "uid_value" @@ -2899,9 +2901,9 @@ async def test_get_cluster_async( with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Cluster( + vmwareengine_resources.Cluster( name="name_value", - state=vmwareengine.Cluster.State.ACTIVE, + state=vmwareengine_resources.Cluster.State.ACTIVE, management=True, uid="uid_value", ) @@ -2914,9 +2916,9 @@ async def test_get_cluster_async( assert args[0] == vmwareengine.GetClusterRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Cluster) + assert isinstance(response, vmwareengine_resources.Cluster) assert response.name == "name_value" - assert response.state == vmwareengine.Cluster.State.ACTIVE + assert response.state == vmwareengine_resources.Cluster.State.ACTIVE assert response.management is True assert response.uid == "uid_value" @@ -2939,7 +2941,7 @@ def test_get_cluster_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: - call.return_value = vmwareengine.Cluster() + call.return_value = vmwareengine_resources.Cluster() client.get_cluster(request) # Establish that the underlying gRPC stub method was called. @@ -2970,7 +2972,7 @@ async def test_get_cluster_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Cluster() + vmwareengine_resources.Cluster() ) await client.get_cluster(request) @@ -2995,7 +2997,7 @@ def test_get_cluster_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Cluster() + call.return_value = vmwareengine_resources.Cluster() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_cluster( @@ -3034,10 +3036,10 @@ async def test_get_cluster_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Cluster() + call.return_value = vmwareengine_resources.Cluster() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Cluster() + vmwareengine_resources.Cluster() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -3226,7 +3228,7 @@ def test_create_cluster_flattened(): # using the keyword arguments to the method. client.create_cluster( parent="parent_value", - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), cluster_id="cluster_id_value", ) @@ -3238,7 +3240,7 @@ def test_create_cluster_flattened(): mock_val = "parent_value" assert arg == mock_val arg = args[0].cluster - mock_val = vmwareengine.Cluster(name="name_value") + mock_val = vmwareengine_resources.Cluster(name="name_value") assert arg == mock_val arg = args[0].cluster_id mock_val = "cluster_id_value" @@ -3256,7 +3258,7 @@ def test_create_cluster_flattened_error(): client.create_cluster( vmwareengine.CreateClusterRequest(), parent="parent_value", - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), cluster_id="cluster_id_value", ) @@ -3279,7 +3281,7 @@ async def test_create_cluster_flattened_async(): # using the keyword arguments to the method. 
response = await client.create_cluster( parent="parent_value", - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), cluster_id="cluster_id_value", ) @@ -3291,7 +3293,7 @@ async def test_create_cluster_flattened_async(): mock_val = "parent_value" assert arg == mock_val arg = args[0].cluster - mock_val = vmwareengine.Cluster(name="name_value") + mock_val = vmwareengine_resources.Cluster(name="name_value") assert arg == mock_val arg = args[0].cluster_id mock_val = "cluster_id_value" @@ -3310,7 +3312,7 @@ async def test_create_cluster_flattened_error_async(): await client.create_cluster( vmwareengine.CreateClusterRequest(), parent="parent_value", - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), cluster_id="cluster_id_value", ) @@ -3471,7 +3473,7 @@ def test_update_cluster_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_cluster( - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -3480,7 +3482,7 @@ def test_update_cluster_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].cluster - mock_val = vmwareengine.Cluster(name="name_value") + mock_val = vmwareengine_resources.Cluster(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -3497,7 +3499,7 @@ def test_update_cluster_flattened_error(): with pytest.raises(ValueError): client.update_cluster( vmwareengine.UpdateClusterRequest(), - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -3519,7 +3521,7 @@ async def test_update_cluster_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_cluster( - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -3528,7 +3530,7 @@ async def test_update_cluster_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].cluster - mock_val = vmwareengine.Cluster(name="name_value") + mock_val = vmwareengine_resources.Cluster(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -3546,7 +3548,7 @@ async def test_update_cluster_flattened_error_async(): with pytest.raises(ValueError): await client.update_cluster( vmwareengine.UpdateClusterRequest(), - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4021,9 +4023,9 @@ def test_list_subnets_pager(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="abc", ), @@ -4033,14 +4035,14 @@ def test_list_subnets_pager(transport_name: str = "grpc"): ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="ghi", ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], ), RuntimeError, @@ -4056,7 +4058,7 @@ def test_list_subnets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.Subnet) for i in results) + assert all(isinstance(i, vmwareengine_resources.Subnet) for i in results) def test_list_subnets_pages(transport_name: str = "grpc"): @@ -4071,9 +4073,9 @@ def test_list_subnets_pages(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="abc", ), @@ -4083,14 +4085,14 @@ def test_list_subnets_pages(transport_name: str = "grpc"): ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="ghi", ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], ), RuntimeError, @@ -4114,9 +4116,9 @@ async def test_list_subnets_async_pager(): call.side_effect = ( vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="abc", ), @@ -4126,14 +4128,14 @@ async def test_list_subnets_async_pager(): ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="ghi", ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], ), RuntimeError, @@ -4147,7 +4149,7 @@ async def 
test_list_subnets_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine.Subnet) for i in responses) + assert all(isinstance(i, vmwareengine_resources.Subnet) for i in responses) @pytest.mark.asyncio @@ -4164,9 +4166,9 @@ async def test_list_subnets_async_pages(): call.side_effect = ( vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="abc", ), @@ -4176,14 +4178,14 @@ async def test_list_subnets_async_pages(): ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="ghi", ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], ), RuntimeError, @@ -4445,9 +4447,9 @@ def test_list_node_types_pager(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="abc", ), @@ -4457,14 +4459,14 @@ def test_list_node_types_pager(transport_name: str = "grpc"): ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="ghi", ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], ), RuntimeError, @@ -4480,7 +4482,7 @@ def test_list_node_types_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.NodeType) for i in results) + assert all(isinstance(i, vmwareengine_resources.NodeType) for i in results) def test_list_node_types_pages(transport_name: str = "grpc"): @@ -4495,9 +4497,9 @@ def test_list_node_types_pages(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="abc", ), @@ -4507,14 +4509,14 @@ def test_list_node_types_pages(transport_name: str = "grpc"): ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="ghi", ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], ), RuntimeError, @@ -4538,9 +4540,9 @@ async def test_list_node_types_async_pager(): call.side_effect = ( vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="abc", ), @@ -4550,14 +4552,14 @@ async def test_list_node_types_async_pager(): ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="ghi", ), vmwareengine.ListNodeTypesResponse( node_types=[ - 
vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], ), RuntimeError, @@ -4571,7 +4573,7 @@ async def test_list_node_types_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine.NodeType) for i in responses) + assert all(isinstance(i, vmwareengine_resources.NodeType) for i in responses) @pytest.mark.asyncio @@ -4588,9 +4590,9 @@ async def test_list_node_types_async_pages(): call.side_effect = ( vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="abc", ), @@ -4600,14 +4602,14 @@ async def test_list_node_types_async_pages(): ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="ghi", ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], ), RuntimeError, @@ -4641,7 +4643,7 @@ def test_get_node_type(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.NodeType( + call.return_value = vmwareengine_resources.NodeType( name="name_value", node_type_id="node_type_id_value", display_name="display_name_value", @@ -4659,7 +4661,7 @@ def test_get_node_type(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.GetNodeTypeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.NodeType) + assert isinstance(response, vmwareengine_resources.NodeType) assert response.name == "name_value" assert response.node_type_id == "node_type_id_value" assert response.display_name == "display_name_value" @@ -4703,7 +4705,7 @@ async def test_get_node_type_async( with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.NodeType( + vmwareengine_resources.NodeType( name="name_value", node_type_id="node_type_id_value", display_name="display_name_value", @@ -4722,7 +4724,7 @@ async def test_get_node_type_async( assert args[0] == vmwareengine.GetNodeTypeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.NodeType) + assert isinstance(response, vmwareengine_resources.NodeType) assert response.name == "name_value" assert response.node_type_id == "node_type_id_value" assert response.display_name == "display_name_value" @@ -4751,7 +4753,7 @@ def test_get_node_type_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: - call.return_value = vmwareengine.NodeType() + call.return_value = vmwareengine_resources.NodeType() client.get_node_type(request) # Establish that the underlying gRPC stub method was called. @@ -4782,7 +4784,7 @@ async def test_get_node_type_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.NodeType() + vmwareengine_resources.NodeType() ) await client.get_node_type(request) @@ -4807,7 +4809,7 @@ def test_get_node_type_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.NodeType() + call.return_value = vmwareengine_resources.NodeType() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_node_type( @@ -4846,10 +4848,10 @@ async def test_get_node_type_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.NodeType() + call.return_value = vmwareengine_resources.NodeType() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.NodeType() + vmwareengine_resources.NodeType() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -4903,7 +4905,7 @@ def test_show_nsx_credentials(request_type, transport: str = "grpc"): type(client.transport.show_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Credentials( + call.return_value = vmwareengine_resources.Credentials( username="username_value", password="password_value", ) @@ -4915,7 +4917,7 @@ def test_show_nsx_credentials(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.ShowNsxCredentialsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Credentials) + assert isinstance(response, vmwareengine_resources.Credentials) assert response.username == "username_value" assert response.password == "password_value" @@ -4957,7 +4959,7 @@ async def test_show_nsx_credentials_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Credentials( + vmwareengine_resources.Credentials( username="username_value", password="password_value", ) @@ -4970,7 +4972,7 @@ async def test_show_nsx_credentials_async( assert args[0] == vmwareengine.ShowNsxCredentialsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Credentials) + assert isinstance(response, vmwareengine_resources.Credentials) assert response.username == "username_value" assert response.password == "password_value" @@ -4995,7 +4997,7 @@ def test_show_nsx_credentials_field_headers(): with mock.patch.object( type(client.transport.show_nsx_credentials), "__call__" ) as call: - call.return_value = vmwareengine.Credentials() + call.return_value = vmwareengine_resources.Credentials() client.show_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. 
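
Note on the rename threaded through these test hunks: the generated types are now split across two modules, with request and response messages staying in vmwareengine while resource messages (PrivateCloud, Cluster, Subnet, NodeType, Credentials, HcxActivationKey, NetworkPolicy) move to vmwareengine_resources — hence the new import at the top of the test file. A minimal sketch of what calling code looks like after the split, assuming a private cloud path of the usual projects/locations form:

    from google.cloud import vmwareengine_v1
    from google.cloud.vmwareengine_v1.types import vmwareengine, vmwareengine_resources

    client = vmwareengine_v1.VmwareEngineClient()
    # Request messages still come from the vmwareengine module...
    request = vmwareengine.GetPrivateCloudRequest(
        name="projects/my-project/locations/us-central1-a/privateClouds/my-pc"
    )
    # ...while the returned resource is a vmwareengine_resources type,
    # matching the isinstance assertions in the tests above.
    private_cloud = client.get_private_cloud(request=request)
    assert isinstance(private_cloud, vmwareengine_resources.PrivateCloud)
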
@@ -5028,7 +5030,7 @@ async def test_show_nsx_credentials_field_headers_async(): type(client.transport.show_nsx_credentials), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Credentials() + vmwareengine_resources.Credentials() ) await client.show_nsx_credentials(request) @@ -5055,7 +5057,7 @@ def test_show_nsx_credentials_flattened(): type(client.transport.show_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Credentials() + call.return_value = vmwareengine_resources.Credentials() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.show_nsx_credentials( @@ -5096,10 +5098,10 @@ async def test_show_nsx_credentials_flattened_async(): type(client.transport.show_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Credentials() + call.return_value = vmwareengine_resources.Credentials() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Credentials() + vmwareengine_resources.Credentials() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -5153,7 +5155,7 @@ def test_show_vcenter_credentials(request_type, transport: str = "grpc"): type(client.transport.show_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Credentials( + call.return_value = vmwareengine_resources.Credentials( username="username_value", password="password_value", ) @@ -5165,7 +5167,7 @@ def test_show_vcenter_credentials(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Credentials) + assert isinstance(response, vmwareengine_resources.Credentials) assert response.username == "username_value" assert response.password == "password_value" @@ -5208,7 +5210,7 @@ async def test_show_vcenter_credentials_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Credentials( + vmwareengine_resources.Credentials( username="username_value", password="password_value", ) @@ -5221,7 +5223,7 @@ async def test_show_vcenter_credentials_async( assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Credentials) + assert isinstance(response, vmwareengine_resources.Credentials) assert response.username == "username_value" assert response.password == "password_value" @@ -5246,7 +5248,7 @@ def test_show_vcenter_credentials_field_headers(): with mock.patch.object( type(client.transport.show_vcenter_credentials), "__call__" ) as call: - call.return_value = vmwareengine.Credentials() + call.return_value = vmwareengine_resources.Credentials() client.show_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. 
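
The NSX and vCenter credential RPCs return the same resource message, which is why both sets of tests swap vmwareengine.Credentials for vmwareengine_resources.Credentials. A hedged usage sketch — the flattened private_cloud argument mirrors the flattened-field tests below, and the resource path is a placeholder:

    from google.cloud import vmwareengine_v1

    client = vmwareengine_v1.VmwareEngineClient()
    # Per the tests, the response is a vmwareengine_resources.Credentials
    # message exposing username and password fields.
    creds = client.show_vcenter_credentials(
        private_cloud="projects/my-project/locations/us-central1-a/privateClouds/my-pc"
    )
    print(creds.username)
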
@@ -5279,7 +5281,7 @@ async def test_show_vcenter_credentials_field_headers_async(): type(client.transport.show_vcenter_credentials), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Credentials() + vmwareengine_resources.Credentials() ) await client.show_vcenter_credentials(request) @@ -5306,7 +5308,7 @@ def test_show_vcenter_credentials_flattened(): type(client.transport.show_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Credentials() + call.return_value = vmwareengine_resources.Credentials() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.show_vcenter_credentials( @@ -5347,10 +5349,10 @@ async def test_show_vcenter_credentials_flattened_async(): type(client.transport.show_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.Credentials() + call.return_value = vmwareengine_resources.Credentials() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.Credentials() + vmwareengine_resources.Credentials() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -6034,7 +6036,9 @@ def test_create_hcx_activation_key_flattened(): # using the keyword arguments to the method. client.create_hcx_activation_key( parent="parent_value", - hcx_activation_key=vmwareengine.HcxActivationKey(name="name_value"), + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), hcx_activation_key_id="hcx_activation_key_id_value", ) @@ -6046,7 +6050,7 @@ def test_create_hcx_activation_key_flattened(): mock_val = "parent_value" assert arg == mock_val arg = args[0].hcx_activation_key - mock_val = vmwareengine.HcxActivationKey(name="name_value") + mock_val = vmwareengine_resources.HcxActivationKey(name="name_value") assert arg == mock_val arg = args[0].hcx_activation_key_id mock_val = "hcx_activation_key_id_value" @@ -6064,7 +6068,9 @@ def test_create_hcx_activation_key_flattened_error(): client.create_hcx_activation_key( vmwareengine.CreateHcxActivationKeyRequest(), parent="parent_value", - hcx_activation_key=vmwareengine.HcxActivationKey(name="name_value"), + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), hcx_activation_key_id="hcx_activation_key_id_value", ) @@ -6089,7 +6095,9 @@ async def test_create_hcx_activation_key_flattened_async(): # using the keyword arguments to the method. 
response = await client.create_hcx_activation_key( parent="parent_value", - hcx_activation_key=vmwareengine.HcxActivationKey(name="name_value"), + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), hcx_activation_key_id="hcx_activation_key_id_value", ) @@ -6101,7 +6109,7 @@ async def test_create_hcx_activation_key_flattened_async(): mock_val = "parent_value" assert arg == mock_val arg = args[0].hcx_activation_key - mock_val = vmwareengine.HcxActivationKey(name="name_value") + mock_val = vmwareengine_resources.HcxActivationKey(name="name_value") assert arg == mock_val arg = args[0].hcx_activation_key_id mock_val = "hcx_activation_key_id_value" @@ -6120,7 +6128,9 @@ async def test_create_hcx_activation_key_flattened_error_async(): await client.create_hcx_activation_key( vmwareengine.CreateHcxActivationKeyRequest(), parent="parent_value", - hcx_activation_key=vmwareengine.HcxActivationKey(name="name_value"), + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), hcx_activation_key_id="hcx_activation_key_id_value", ) @@ -6390,9 +6400,9 @@ def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="abc", ), @@ -6402,14 +6412,14 @@ def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="ghi", ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], ), RuntimeError, @@ -6425,7 +6435,9 @@ def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.HcxActivationKey) for i in results) + assert all( + isinstance(i, vmwareengine_resources.HcxActivationKey) for i in results + ) def test_list_hcx_activation_keys_pages(transport_name: str = "grpc"): @@ -6442,9 +6454,9 @@ def test_list_hcx_activation_keys_pages(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="abc", ), @@ -6454,14 +6466,14 @@ def test_list_hcx_activation_keys_pages(transport_name: str = "grpc"): ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="ghi", ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], ), RuntimeError, @@ -6487,9 +6499,9 @@ async def test_list_hcx_activation_keys_async_pager(): call.side_effect = ( vmwareengine.ListHcxActivationKeysResponse( 
hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="abc", ), @@ -6499,14 +6511,14 @@ async def test_list_hcx_activation_keys_async_pager(): ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="ghi", ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], ), RuntimeError, @@ -6520,7 +6532,9 @@ async def test_list_hcx_activation_keys_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine.HcxActivationKey) for i in responses) + assert all( + isinstance(i, vmwareengine_resources.HcxActivationKey) for i in responses + ) @pytest.mark.asyncio @@ -6539,9 +6553,9 @@ async def test_list_hcx_activation_keys_async_pages(): call.side_effect = ( vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="abc", ), @@ -6551,14 +6565,14 @@ async def test_list_hcx_activation_keys_async_pages(): ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="ghi", ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], ), RuntimeError, @@ -6594,9 +6608,9 @@ def test_get_hcx_activation_key(request_type, transport: str = "grpc"): type(client.transport.get_hcx_activation_key), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.HcxActivationKey( + call.return_value = vmwareengine_resources.HcxActivationKey( name="name_value", - state=vmwareengine.HcxActivationKey.State.AVAILABLE, + state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, activation_key="activation_key_value", uid="uid_value", ) @@ -6608,9 +6622,9 @@ def test_get_hcx_activation_key(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.GetHcxActivationKeyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.HcxActivationKey) + assert isinstance(response, vmwareengine_resources.HcxActivationKey) assert response.name == "name_value" - assert response.state == vmwareengine.HcxActivationKey.State.AVAILABLE + assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE assert response.activation_key == "activation_key_value" assert response.uid == "uid_value" @@ -6653,9 +6667,9 @@ async def test_get_hcx_activation_key_async( ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.HcxActivationKey( + vmwareengine_resources.HcxActivationKey( name="name_value", - state=vmwareengine.HcxActivationKey.State.AVAILABLE, + state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, activation_key="activation_key_value", uid="uid_value", ) @@ -6668,9 +6682,9 @@ async def test_get_hcx_activation_key_async( assert args[0] == vmwareengine.GetHcxActivationKeyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.HcxActivationKey) + assert isinstance(response, vmwareengine_resources.HcxActivationKey) assert response.name == "name_value" - assert response.state == vmwareengine.HcxActivationKey.State.AVAILABLE + assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE assert response.activation_key == "activation_key_value" assert response.uid == "uid_value" @@ -6695,7 +6709,7 @@ def test_get_hcx_activation_key_field_headers(): with mock.patch.object( type(client.transport.get_hcx_activation_key), "__call__" ) as call: - call.return_value = vmwareengine.HcxActivationKey() + call.return_value = vmwareengine_resources.HcxActivationKey() client.get_hcx_activation_key(request) # Establish that the underlying gRPC stub method was called. @@ -6728,7 +6742,7 @@ async def test_get_hcx_activation_key_field_headers_async(): type(client.transport.get_hcx_activation_key), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.HcxActivationKey() + vmwareengine_resources.HcxActivationKey() ) await client.get_hcx_activation_key(request) @@ -6755,7 +6769,7 @@ def test_get_hcx_activation_key_flattened(): type(client.transport.get_hcx_activation_key), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.HcxActivationKey() + call.return_value = vmwareengine_resources.HcxActivationKey() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_hcx_activation_key( @@ -6796,10 +6810,10 @@ async def test_get_hcx_activation_key_flattened_async(): type(client.transport.get_hcx_activation_key), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.HcxActivationKey() + call.return_value = vmwareengine_resources.HcxActivationKey() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.HcxActivationKey() + vmwareengine_resources.HcxActivationKey() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -6853,7 +6867,7 @@ def test_get_network_policy(request_type, transport: str = "grpc"): type(client.transport.get_network_policy), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.NetworkPolicy( + call.return_value = vmwareengine_resources.NetworkPolicy( name="name_value", edge_services_cidr="edge_services_cidr_value", uid="uid_value", @@ -6869,7 +6883,7 @@ def test_get_network_policy(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.GetNetworkPolicyRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine.NetworkPolicy) + assert isinstance(response, vmwareengine_resources.NetworkPolicy) assert response.name == "name_value" assert response.edge_services_cidr == "edge_services_cidr_value" assert response.uid == "uid_value" @@ -6918,7 +6932,7 @@ async def test_get_network_policy_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.NetworkPolicy( + vmwareengine_resources.NetworkPolicy( name="name_value", edge_services_cidr="edge_services_cidr_value", uid="uid_value", @@ -6935,7 +6949,7 @@ async def test_get_network_policy_async( assert args[0] == vmwareengine.GetNetworkPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.NetworkPolicy) + assert isinstance(response, vmwareengine_resources.NetworkPolicy) assert response.name == "name_value" assert response.edge_services_cidr == "edge_services_cidr_value" assert response.uid == "uid_value" @@ -6967,7 +6981,7 @@ def test_get_network_policy_field_headers(): with mock.patch.object( type(client.transport.get_network_policy), "__call__" ) as call: - call.return_value = vmwareengine.NetworkPolicy() + call.return_value = vmwareengine_resources.NetworkPolicy() client.get_network_policy(request) # Establish that the underlying gRPC stub method was called. @@ -7000,7 +7014,7 @@ async def test_get_network_policy_field_headers_async(): type(client.transport.get_network_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.NetworkPolicy() + vmwareengine_resources.NetworkPolicy() ) await client.get_network_policy(request) @@ -7027,7 +7041,7 @@ def test_get_network_policy_flattened(): type(client.transport.get_network_policy), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.NetworkPolicy() + call.return_value = vmwareengine_resources.NetworkPolicy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_network_policy( @@ -7068,10 +7082,10 @@ async def test_get_network_policy_flattened_async(): type(client.transport.get_network_policy), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.NetworkPolicy() + call.return_value = vmwareengine_resources.NetworkPolicy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.NetworkPolicy() + vmwareengine_resources.NetworkPolicy() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
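
The NetworkPolicy pager hunks below repeat the pattern used for the other list methods: mocked pages chained by next_page_token, with the pager expected to yield vmwareengine_resources.NetworkPolicy items. A minimal sketch of the client-side iteration these tests simulate (project and location are placeholders):

    from google.cloud import vmwareengine_v1

    client = vmwareengine_v1.VmwareEngineClient()
    # The returned pager fetches successive pages transparently;
    # iterating it yields NetworkPolicy resources one at a time.
    for policy in client.list_network_policies(
        parent="projects/my-project/locations/us-central1"
    ):
        print(policy.name, policy.edge_services_cidr)
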
@@ -7368,9 +7382,9 @@ def test_list_network_policies_pager(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="abc", ), @@ -7380,14 +7394,14 @@ def test_list_network_policies_pager(transport_name: str = "grpc"): ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="ghi", ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], ), RuntimeError, @@ -7403,7 +7417,7 @@ def test_list_network_policies_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.NetworkPolicy) for i in results) + assert all(isinstance(i, vmwareengine_resources.NetworkPolicy) for i in results) def test_list_network_policies_pages(transport_name: str = "grpc"): @@ -7420,9 +7434,9 @@ def test_list_network_policies_pages(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="abc", ), @@ -7432,14 +7446,14 @@ def test_list_network_policies_pages(transport_name: str = "grpc"): ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="ghi", ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], ), RuntimeError, @@ -7465,9 +7479,9 @@ async def test_list_network_policies_async_pager(): call.side_effect = ( vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="abc", ), @@ -7477,14 +7491,14 @@ async def test_list_network_policies_async_pager(): ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="ghi", ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], ), RuntimeError, @@ -7498,7 +7512,9 @@ async def test_list_network_policies_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine.NetworkPolicy) for i in responses) + assert all( + isinstance(i, vmwareengine_resources.NetworkPolicy) for i in responses + ) @pytest.mark.asyncio @@ -7517,9 +7533,9 @@ async def test_list_network_policies_async_pages(): call.side_effect = ( vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - 
vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="abc", ), @@ -7529,14 +7545,14 @@ async def test_list_network_policies_async_pages(): ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="ghi", ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], ), RuntimeError, @@ -7720,7 +7736,7 @@ def test_create_network_policy_flattened(): # using the keyword arguments to the method. client.create_network_policy( parent="parent_value", - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), network_policy_id="network_policy_id_value", ) @@ -7732,7 +7748,7 @@ def test_create_network_policy_flattened(): mock_val = "parent_value" assert arg == mock_val arg = args[0].network_policy - mock_val = vmwareengine.NetworkPolicy(name="name_value") + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") assert arg == mock_val arg = args[0].network_policy_id mock_val = "network_policy_id_value" @@ -7750,7 +7766,7 @@ def test_create_network_policy_flattened_error(): client.create_network_policy( vmwareengine.CreateNetworkPolicyRequest(), parent="parent_value", - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), network_policy_id="network_policy_id_value", ) @@ -7775,7 +7791,7 @@ async def test_create_network_policy_flattened_async(): # using the keyword arguments to the method. response = await client.create_network_policy( parent="parent_value", - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), network_policy_id="network_policy_id_value", ) @@ -7787,7 +7803,7 @@ async def test_create_network_policy_flattened_async(): mock_val = "parent_value" assert arg == mock_val arg = args[0].network_policy - mock_val = vmwareengine.NetworkPolicy(name="name_value") + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") assert arg == mock_val arg = args[0].network_policy_id mock_val = "network_policy_id_value" @@ -7806,7 +7822,7 @@ async def test_create_network_policy_flattened_error_async(): await client.create_network_policy( vmwareengine.CreateNetworkPolicyRequest(), parent="parent_value", - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), network_policy_id="network_policy_id_value", ) @@ -7980,7 +7996,7 @@ def test_update_network_policy_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_network_policy( - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -7989,7 +8005,7 @@ def test_update_network_policy_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].network_policy - mock_val = vmwareengine.NetworkPolicy(name="name_value") + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -8006,7 +8022,7 @@ def test_update_network_policy_flattened_error(): with pytest.raises(ValueError): client.update_network_policy( vmwareengine.UpdateNetworkPolicyRequest(), - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -8030,7 +8046,7 @@ async def test_update_network_policy_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_network_policy( - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -8039,7 +8055,7 @@ async def test_update_network_policy_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].network_policy - mock_val = vmwareengine.NetworkPolicy(name="name_value") + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -8057,7 +8073,7 @@ async def test_update_network_policy_flattened_error_async(): with pytest.raises(ValueError): await client.update_network_policy( vmwareengine.UpdateNetworkPolicyRequest(), - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -8473,7 +8489,9 @@ def test_create_vmware_engine_network_flattened(): # using the keyword arguments to the method. 
client.create_vmware_engine_network( parent="parent_value", - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), vmware_engine_network_id="vmware_engine_network_id_value", ) @@ -8485,7 +8503,7 @@ def test_create_vmware_engine_network_flattened(): mock_val = "parent_value" assert arg == mock_val arg = args[0].vmware_engine_network - mock_val = vmwareengine.VmwareEngineNetwork(name="name_value") + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") assert arg == mock_val arg = args[0].vmware_engine_network_id mock_val = "vmware_engine_network_id_value" @@ -8503,7 +8521,9 @@ def test_create_vmware_engine_network_flattened_error(): client.create_vmware_engine_network( vmwareengine.CreateVmwareEngineNetworkRequest(), parent="parent_value", - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), vmware_engine_network_id="vmware_engine_network_id_value", ) @@ -8528,7 +8548,9 @@ async def test_create_vmware_engine_network_flattened_async(): # using the keyword arguments to the method. response = await client.create_vmware_engine_network( parent="parent_value", - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), vmware_engine_network_id="vmware_engine_network_id_value", ) @@ -8540,7 +8562,7 @@ async def test_create_vmware_engine_network_flattened_async(): mock_val = "parent_value" assert arg == mock_val arg = args[0].vmware_engine_network - mock_val = vmwareengine.VmwareEngineNetwork(name="name_value") + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") assert arg == mock_val arg = args[0].vmware_engine_network_id mock_val = "vmware_engine_network_id_value" @@ -8559,7 +8581,9 @@ async def test_create_vmware_engine_network_flattened_error_async(): await client.create_vmware_engine_network( vmwareengine.CreateVmwareEngineNetworkRequest(), parent="parent_value", - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), vmware_engine_network_id="vmware_engine_network_id_value", ) @@ -8733,7 +8757,9 @@ def test_update_vmware_engine_network_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_vmware_engine_network( - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -8742,7 +8768,7 @@ def test_update_vmware_engine_network_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].vmware_engine_network - mock_val = vmwareengine.VmwareEngineNetwork(name="name_value") + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -8759,7 +8785,9 @@ def test_update_vmware_engine_network_flattened_error(): with pytest.raises(ValueError): client.update_vmware_engine_network( vmwareengine.UpdateVmwareEngineNetworkRequest(), - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -8783,7 +8811,9 @@ async def test_update_vmware_engine_network_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_vmware_engine_network( - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -8792,7 +8822,7 @@ async def test_update_vmware_engine_network_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].vmware_engine_network - mock_val = vmwareengine.VmwareEngineNetwork(name="name_value") + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -8810,7 +8840,9 @@ async def test_update_vmware_engine_network_flattened_error_async(): with pytest.raises(ValueError): await client.update_vmware_engine_network( vmwareengine.UpdateVmwareEngineNetworkRequest(), - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -9078,11 +9110,11 @@ def test_get_vmware_engine_network(request_type, transport: str = "grpc"): type(client.transport.get_vmware_engine_network), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.VmwareEngineNetwork( + call.return_value = vmwareengine_resources.VmwareEngineNetwork( name="name_value", description="description_value", - state=vmwareengine.VmwareEngineNetwork.State.CREATING, - type_=vmwareengine.VmwareEngineNetwork.Type.LEGACY, + state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, + type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, uid="uid_value", etag="etag_value", ) @@ -9094,11 +9126,11 @@ def test_get_vmware_engine_network(request_type, transport: str = "grpc"): assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine.VmwareEngineNetwork) + assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) assert response.name == "name_value" assert response.description == "description_value" - assert response.state == vmwareengine.VmwareEngineNetwork.State.CREATING - assert response.type_ == vmwareengine.VmwareEngineNetwork.Type.LEGACY + assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING + assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY assert response.uid == "uid_value" assert response.etag == "etag_value" @@ -9141,11 +9173,11 @@ async def test_get_vmware_engine_network_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.VmwareEngineNetwork( + vmwareengine_resources.VmwareEngineNetwork( name="name_value", description="description_value", - state=vmwareengine.VmwareEngineNetwork.State.CREATING, - type_=vmwareengine.VmwareEngineNetwork.Type.LEGACY, + state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, + type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, uid="uid_value", etag="etag_value", ) @@ -9158,11 +9190,11 @@ async def test_get_vmware_engine_network_async( assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.VmwareEngineNetwork) + assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) assert response.name == "name_value" assert response.description == "description_value" - assert response.state == vmwareengine.VmwareEngineNetwork.State.CREATING - assert response.type_ == vmwareengine.VmwareEngineNetwork.Type.LEGACY + assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING + assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY assert response.uid == "uid_value" assert response.etag == "etag_value" @@ -9187,7 +9219,7 @@ def test_get_vmware_engine_network_field_headers(): with mock.patch.object( type(client.transport.get_vmware_engine_network), "__call__" ) as call: - call.return_value = vmwareengine.VmwareEngineNetwork() + call.return_value = vmwareengine_resources.VmwareEngineNetwork() client.get_vmware_engine_network(request) # Establish that the underlying gRPC stub method was called. @@ -9220,7 +9252,7 @@ async def test_get_vmware_engine_network_field_headers_async(): type(client.transport.get_vmware_engine_network), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.VmwareEngineNetwork() + vmwareengine_resources.VmwareEngineNetwork() ) await client.get_vmware_engine_network(request) @@ -9247,7 +9279,7 @@ def test_get_vmware_engine_network_flattened(): type(client.transport.get_vmware_engine_network), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.VmwareEngineNetwork() + call.return_value = vmwareengine_resources.VmwareEngineNetwork() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_vmware_engine_network( @@ -9288,10 +9320,10 @@ async def test_get_vmware_engine_network_flattened_async(): type(client.transport.get_vmware_engine_network), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine.VmwareEngineNetwork() + call.return_value = vmwareengine_resources.VmwareEngineNetwork() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.VmwareEngineNetwork() + vmwareengine_resources.VmwareEngineNetwork() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. @@ -9588,9 +9620,9 @@ def test_list_vmware_engine_networks_pager(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="abc", ), @@ -9600,14 +9632,14 @@ def test_list_vmware_engine_networks_pager(transport_name: str = "grpc"): ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="ghi", ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], ), RuntimeError, @@ -9623,7 +9655,9 @@ def test_list_vmware_engine_networks_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.VmwareEngineNetwork) for i in results) + assert all( + isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in results + ) def test_list_vmware_engine_networks_pages(transport_name: str = "grpc"): @@ -9640,9 +9674,9 @@ def test_list_vmware_engine_networks_pages(transport_name: str = "grpc"): call.side_effect = ( vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="abc", ), @@ -9652,14 +9686,14 @@ def test_list_vmware_engine_networks_pages(transport_name: str = "grpc"): ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="ghi", ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], ), RuntimeError, @@ -9685,9 +9719,9 @@ async def test_list_vmware_engine_networks_async_pager(): call.side_effect = ( vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="abc", ), @@ -9697,14 +9731,14 @@ async def test_list_vmware_engine_networks_async_pager(): ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="ghi", 
), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], ), RuntimeError, @@ -9718,7 +9752,9 @@ async def test_list_vmware_engine_networks_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine.VmwareEngineNetwork) for i in responses) + assert all( + isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in responses + ) @pytest.mark.asyncio @@ -9737,9 +9773,9 @@ async def test_list_vmware_engine_networks_async_pages(): call.side_effect = ( vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="abc", ), @@ -9749,14 +9785,14 @@ async def test_list_vmware_engine_networks_async_pages(): ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="ghi", ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], ), RuntimeError, @@ -10067,9 +10103,9 @@ def test_list_private_clouds_rest_pager(transport: str = "rest"): response = ( vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="abc", ), @@ -10079,14 +10115,14 @@ def test_list_private_clouds_rest_pager(transport: str = "rest"): ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], next_page_token="ghi", ), vmwareengine.ListPrivateCloudsResponse( private_clouds=[ - vmwareengine.PrivateCloud(), - vmwareengine.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), ], ), ) @@ -10109,7 +10145,7 @@ def test_list_private_clouds_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.PrivateCloud) for i in results) + assert all(isinstance(i, vmwareengine_resources.PrivateCloud) for i in results) pages = list(client.list_private_clouds(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -10136,9 +10172,9 @@ def test_get_private_cloud_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.PrivateCloud( + return_value = vmwareengine_resources.PrivateCloud( name="name_value", - state=vmwareengine.PrivateCloud.State.ACTIVE, + state=vmwareengine_resources.PrivateCloud.State.ACTIVE, description="description_value", uid="uid_value", ) @@ -10146,7 +10182,7 @@ def test_get_private_cloud_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.PrivateCloud.pb(return_value) + pb_return_value = vmwareengine_resources.PrivateCloud.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -10154,9 +10190,9 @@ def test_get_private_cloud_rest(request_type): response = client.get_private_cloud(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.PrivateCloud) + assert isinstance(response, vmwareengine_resources.PrivateCloud) assert response.name == "name_value" - assert response.state == vmwareengine.PrivateCloud.State.ACTIVE + assert response.state == vmwareengine_resources.PrivateCloud.State.ACTIVE assert response.description == "description_value" assert response.uid == "uid_value" @@ -10205,7 +10241,7 @@ def test_get_private_cloud_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.PrivateCloud() + return_value = vmwareengine_resources.PrivateCloud() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10225,7 +10261,7 @@ def test_get_private_cloud_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.PrivateCloud.pb(return_value) + pb_return_value = vmwareengine_resources.PrivateCloud.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -10280,8 +10316,8 @@ def test_get_private_cloud_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.PrivateCloud.to_json( - vmwareengine.PrivateCloud() + req.return_value._content = vmwareengine_resources.PrivateCloud.to_json( + vmwareengine_resources.PrivateCloud() ) request = vmwareengine.GetPrivateCloudRequest() @@ -10290,7 +10326,7 @@ def test_get_private_cloud_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.PrivateCloud() + post.return_value = vmwareengine_resources.PrivateCloud() client.get_private_cloud( request, @@ -10337,7 +10373,7 @@ def test_get_private_cloud_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.PrivateCloud() + return_value = vmwareengine_resources.PrivateCloud() # get arguments that satisfy an http rule for this method sample_request = { @@ -10353,7 +10389,7 @@ def test_get_private_cloud_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.PrivateCloud.pb(return_value) + pb_return_value = vmwareengine_resources.PrivateCloud.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10724,7 +10760,7 @@ def test_create_private_cloud_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), private_cloud_id="private_cloud_id_value", ) mock_args.update(sample_request) @@ -10761,7 +10797,7 @@ def test_create_private_cloud_rest_flattened_error(transport: str = "rest"): client.create_private_cloud( vmwareengine.CreatePrivateCloudRequest(), parent="parent_value", - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), private_cloud_id="private_cloud_id_value", ) @@ -11094,7 +11130,7 @@ def test_update_private_cloud_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -11130,7 +11166,7 @@ def test_update_private_cloud_rest_flattened_error(transport: str = "rest"): with pytest.raises(ValueError): client.update_private_cloud( vmwareengine.UpdatePrivateCloudRequest(), - private_cloud=vmwareengine.PrivateCloud(name="name_value"), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -11992,9 +12028,9 @@ def test_list_clusters_rest_pager(transport: str = "rest"): response = ( vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="abc", ), @@ -12004,14 +12040,14 @@ def test_list_clusters_rest_pager(transport: str = "rest"): ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), ], next_page_token="ghi", ), vmwareengine.ListClustersResponse( clusters=[ - vmwareengine.Cluster(), - vmwareengine.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), ], ), ) @@ -12034,7 +12070,7 @@ def test_list_clusters_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.Cluster) for i in results) + assert all(isinstance(i, vmwareengine_resources.Cluster) for i in results) pages = list(client.list_clusters(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -12063,9 +12099,9 @@ def test_get_cluster_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.Cluster( + return_value = vmwareengine_resources.Cluster( name="name_value", - state=vmwareengine.Cluster.State.ACTIVE, + state=vmwareengine_resources.Cluster.State.ACTIVE, management=True, uid="uid_value", ) @@ -12073,7 +12109,7 @@ def test_get_cluster_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Cluster.pb(return_value) + pb_return_value = vmwareengine_resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -12081,9 +12117,9 @@ def test_get_cluster_rest(request_type): response = client.get_cluster(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Cluster) + assert isinstance(response, vmwareengine_resources.Cluster) assert response.name == "name_value" - assert response.state == vmwareengine.Cluster.State.ACTIVE + assert response.state == vmwareengine_resources.Cluster.State.ACTIVE assert response.management is True assert response.uid == "uid_value" @@ -12130,7 +12166,7 @@ def test_get_cluster_rest_required_fields(request_type=vmwareengine.GetClusterRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.Cluster() + return_value = vmwareengine_resources.Cluster() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12150,7 +12186,7 @@ def test_get_cluster_rest_required_fields(request_type=vmwareengine.GetClusterRe response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Cluster.pb(return_value) + pb_return_value = vmwareengine_resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -12203,7 +12239,9 @@ def test_get_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.Cluster.to_json(vmwareengine.Cluster()) + req.return_value._content = vmwareengine_resources.Cluster.to_json( + vmwareengine_resources.Cluster() + ) request = vmwareengine.GetClusterRequest() metadata = [ @@ -12211,7 +12249,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.Cluster() + post.return_value = vmwareengine_resources.Cluster() client.get_cluster( request, @@ -12260,7 +12298,7 @@ def test_get_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.Cluster() + return_value = vmwareengine_resources.Cluster() # get arguments that satisfy an http rule for this method sample_request = { @@ -12276,7 +12314,7 @@ def test_get_cluster_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Cluster.pb(return_value) + pb_return_value = vmwareengine_resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12595,7 +12633,7 @@ def test_create_cluster_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), cluster_id="cluster_id_value", ) mock_args.update(sample_request) @@ -12632,7 +12670,7 @@ def test_create_cluster_rest_flattened_error(transport: str = "rest"): client.create_cluster( vmwareengine.CreateClusterRequest(), parent="parent_value", - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), cluster_id="cluster_id_value", ) @@ -12909,7 +12947,7 @@ def test_update_cluster_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -12945,7 +12983,7 @@ def test_update_cluster_rest_flattened_error(transport: str = "rest"): with pytest.raises(ValueError): client.update_cluster( vmwareengine.UpdateClusterRequest(), - cluster=vmwareengine.Cluster(name="name_value"), + cluster=vmwareengine_resources.Cluster(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -13524,9 +13562,9 @@ def test_list_subnets_rest_pager(transport: str = "rest"): response = ( vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="abc", ), @@ -13536,14 +13574,14 @@ def test_list_subnets_rest_pager(transport: str = "rest"): ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), ], next_page_token="ghi", ), vmwareengine.ListSubnetsResponse( subnets=[ - vmwareengine.Subnet(), - vmwareengine.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), ], ), ) @@ -13566,7 +13604,7 @@ def test_list_subnets_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.Subnet) for i in results) + assert all(isinstance(i, vmwareengine_resources.Subnet) for i in results) pages = list(client.list_subnets(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -13867,9 +13905,9 @@ def test_list_node_types_rest_pager(transport: str = "rest"): response = ( vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="abc", ), @@ -13879,14 +13917,14 @@ def test_list_node_types_rest_pager(transport: str = 
"rest"): ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), ], next_page_token="ghi", ), vmwareengine.ListNodeTypesResponse( node_types=[ - vmwareengine.NodeType(), - vmwareengine.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), ], ), ) @@ -13909,7 +13947,7 @@ def test_list_node_types_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.NodeType) for i in results) + assert all(isinstance(i, vmwareengine_resources.NodeType) for i in results) pages = list(client.list_node_types(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -13936,7 +13974,7 @@ def test_get_node_type_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.NodeType( + return_value = vmwareengine_resources.NodeType( name="name_value", node_type_id="node_type_id_value", display_name="display_name_value", @@ -13950,7 +13988,7 @@ def test_get_node_type_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.NodeType.pb(return_value) + pb_return_value = vmwareengine_resources.NodeType.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -13958,7 +13996,7 @@ def test_get_node_type_rest(request_type): response = client.get_node_type(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.NodeType) + assert isinstance(response, vmwareengine_resources.NodeType) assert response.name == "name_value" assert response.node_type_id == "node_type_id_value" assert response.display_name == "display_name_value" @@ -14013,7 +14051,7 @@ def test_get_node_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.NodeType() + return_value = vmwareengine_resources.NodeType() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14033,7 +14071,7 @@ def test_get_node_type_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.NodeType.pb(return_value) + pb_return_value = vmwareengine_resources.NodeType.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -14088,8 +14126,8 @@ def test_get_node_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.NodeType.to_json( - vmwareengine.NodeType() + req.return_value._content = vmwareengine_resources.NodeType.to_json( + vmwareengine_resources.NodeType() ) request = vmwareengine.GetNodeTypeRequest() @@ -14098,7 +14136,7 @@ def test_get_node_type_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.NodeType() + post.return_value = vmwareengine_resources.NodeType() client.get_node_type( request, @@ -14145,7 +14183,7 @@ def test_get_node_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.NodeType() + return_value = vmwareengine_resources.NodeType() # get arguments that satisfy an http rule for this method sample_request = { @@ -14161,7 +14199,7 @@ def test_get_node_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.NodeType.pb(return_value) + pb_return_value = vmwareengine_resources.NodeType.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14221,7 +14259,7 @@ def test_show_nsx_credentials_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.Credentials( + return_value = vmwareengine_resources.Credentials( username="username_value", password="password_value", ) @@ -14229,7 +14267,7 @@ def test_show_nsx_credentials_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Credentials.pb(return_value) + pb_return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -14237,7 +14275,7 @@ def test_show_nsx_credentials_rest(request_type): response = client.show_nsx_credentials(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.Credentials) + assert isinstance(response, vmwareengine_resources.Credentials) assert response.username == "username_value" assert response.password == "password_value" @@ -14286,7 +14324,7 @@ def test_show_nsx_credentials_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.Credentials() + return_value = vmwareengine_resources.Credentials() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14306,7 +14344,7 @@ def test_show_nsx_credentials_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Credentials.pb(return_value) + pb_return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -14361,8 +14399,8 @@ def test_show_nsx_credentials_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.Credentials.to_json( - vmwareengine.Credentials() + req.return_value._content = vmwareengine_resources.Credentials.to_json( + vmwareengine_resources.Credentials() ) request = vmwareengine.ShowNsxCredentialsRequest() @@ -14371,7 +14409,7 @@ def test_show_nsx_credentials_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.Credentials() + post.return_value = vmwareengine_resources.Credentials() client.show_nsx_credentials( request, @@ -14420,7 +14458,7 @@ def test_show_nsx_credentials_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.Credentials() + return_value = vmwareengine_resources.Credentials() # get arguments that satisfy an http rule for this method sample_request = { @@ -14436,7 +14474,7 @@ def test_show_nsx_credentials_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Credentials.pb(return_value) + pb_return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -14497,7 +14535,7 @@ def test_show_vcenter_credentials_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.Credentials( + return_value = vmwareengine_resources.Credentials( username="username_value", password="password_value", ) @@ -14505,7 +14543,7 @@ def test_show_vcenter_credentials_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Credentials.pb(return_value) + pb_return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -14513,7 +14551,7 @@ def test_show_vcenter_credentials_rest(request_type): response = client.show_vcenter_credentials(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine.Credentials) + assert isinstance(response, vmwareengine_resources.Credentials) assert response.username == "username_value" assert response.password == "password_value" @@ -14562,7 +14600,7 @@ def test_show_vcenter_credentials_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.Credentials() + return_value = vmwareengine_resources.Credentials() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14582,7 +14620,7 @@ def test_show_vcenter_credentials_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Credentials.pb(return_value) + pb_return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -14637,8 +14675,8 @@ def test_show_vcenter_credentials_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.Credentials.to_json( - vmwareengine.Credentials() + req.return_value._content = vmwareengine_resources.Credentials.to_json( + vmwareengine_resources.Credentials() ) request = vmwareengine.ShowVcenterCredentialsRequest() @@ -14647,7 +14685,7 @@ def test_show_vcenter_credentials_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.Credentials() + post.return_value = vmwareengine_resources.Credentials() client.show_vcenter_credentials( request, @@ -14696,7 +14734,7 @@ def test_show_vcenter_credentials_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.Credentials() + return_value = vmwareengine_resources.Credentials() # get arguments that satisfy an http rule for this method sample_request = { @@ -14712,7 +14750,7 @@ def test_show_vcenter_credentials_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.Credentials.pb(return_value) + pb_return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -15567,7 +15605,9 @@ def test_create_hcx_activation_key_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - hcx_activation_key=vmwareengine.HcxActivationKey(name="name_value"), + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), hcx_activation_key_id="hcx_activation_key_id_value", ) mock_args.update(sample_request) @@ -15604,7 +15644,9 @@ def test_create_hcx_activation_key_rest_flattened_error(transport: str = "rest") client.create_hcx_activation_key( vmwareengine.CreateHcxActivationKeyRequest(), parent="parent_value", - hcx_activation_key=vmwareengine.HcxActivationKey(name="name_value"), + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), hcx_activation_key_id="hcx_activation_key_id_value", ) @@ -15916,9 +15958,9 @@ def test_list_hcx_activation_keys_rest_pager(transport: str = "rest"): response = ( vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="abc", ), @@ -15928,14 +15970,14 @@ def test_list_hcx_activation_keys_rest_pager(transport: str = "rest"): ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], next_page_token="ghi", ), vmwareengine.ListHcxActivationKeysResponse( hcx_activation_keys=[ - vmwareengine.HcxActivationKey(), - vmwareengine.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), ], ), ) @@ -15960,7 +16002,9 @@ def test_list_hcx_activation_keys_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.HcxActivationKey) for i in results) + assert all( + isinstance(i, vmwareengine_resources.HcxActivationKey) for i in results + ) pages = list(client.list_hcx_activation_keys(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -15989,9 +16033,9 @@ def test_get_hcx_activation_key_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.HcxActivationKey( + return_value = vmwareengine_resources.HcxActivationKey( name="name_value", - state=vmwareengine.HcxActivationKey.State.AVAILABLE, + state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, activation_key="activation_key_value", uid="uid_value", ) @@ -15999,7 +16043,7 @@ def test_get_hcx_activation_key_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.HcxActivationKey.pb(return_value) + pb_return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -16007,9 +16051,9 @@ def test_get_hcx_activation_key_rest(request_type): response = client.get_hcx_activation_key(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.HcxActivationKey) + assert isinstance(response, vmwareengine_resources.HcxActivationKey) assert response.name == "name_value" - assert response.state == vmwareengine.HcxActivationKey.State.AVAILABLE + assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE assert response.activation_key == "activation_key_value" assert response.uid == "uid_value" @@ -16058,7 +16102,7 @@ def test_get_hcx_activation_key_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.HcxActivationKey() + return_value = vmwareengine_resources.HcxActivationKey() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16078,7 +16122,7 @@ def test_get_hcx_activation_key_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.HcxActivationKey.pb(return_value) + pb_return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -16133,8 +16177,8 @@ def test_get_hcx_activation_key_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.HcxActivationKey.to_json( - vmwareengine.HcxActivationKey() + req.return_value._content = vmwareengine_resources.HcxActivationKey.to_json( + vmwareengine_resources.HcxActivationKey() ) request = vmwareengine.GetHcxActivationKeyRequest() @@ -16143,7 +16187,7 @@ def test_get_hcx_activation_key_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.HcxActivationKey() + post.return_value = vmwareengine_resources.HcxActivationKey() client.get_hcx_activation_key( request, @@ -16192,7 +16236,7 @@ def test_get_hcx_activation_key_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.HcxActivationKey() + return_value = vmwareengine_resources.HcxActivationKey() # get arguments that satisfy an http rule for this method sample_request = { @@ -16208,7 +16252,7 @@ def test_get_hcx_activation_key_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.HcxActivationKey.pb(return_value) + pb_return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16269,7 +16313,7 @@ def test_get_network_policy_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.NetworkPolicy( + return_value = vmwareengine_resources.NetworkPolicy( name="name_value", edge_services_cidr="edge_services_cidr_value", uid="uid_value", @@ -16281,7 +16325,7 @@ def test_get_network_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.NetworkPolicy.pb(return_value) + pb_return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -16289,7 +16333,7 @@ def test_get_network_policy_rest(request_type): response = client.get_network_policy(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.NetworkPolicy) + assert isinstance(response, vmwareengine_resources.NetworkPolicy) assert response.name == "name_value" assert response.edge_services_cidr == "edge_services_cidr_value" assert response.uid == "uid_value" @@ -16345,7 +16389,7 @@ def test_get_network_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.NetworkPolicy() + return_value = vmwareengine_resources.NetworkPolicy() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16365,7 +16409,7 @@ def test_get_network_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.NetworkPolicy.pb(return_value) + pb_return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -16420,8 +16464,8 @@ def test_get_network_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.NetworkPolicy.to_json( - vmwareengine.NetworkPolicy() + req.return_value._content = vmwareengine_resources.NetworkPolicy.to_json( + vmwareengine_resources.NetworkPolicy() ) request = vmwareengine.GetNetworkPolicyRequest() @@ -16430,7 +16474,7 @@ def test_get_network_policy_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.NetworkPolicy() + post.return_value = vmwareengine_resources.NetworkPolicy() client.get_network_policy( request, @@ -16479,7 +16523,7 @@ def test_get_network_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.NetworkPolicy() + return_value = vmwareengine_resources.NetworkPolicy() # get arguments that satisfy an http rule for this method sample_request = { @@ -16495,7 +16539,7 @@ def test_get_network_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.NetworkPolicy.pb(return_value) + pb_return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -16831,9 +16875,9 @@ def test_list_network_policies_rest_pager(transport: str = "rest"): response = ( vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="abc", ), @@ -16843,14 +16887,14 @@ def test_list_network_policies_rest_pager(transport: str = "rest"): ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], next_page_token="ghi", ), vmwareengine.ListNetworkPoliciesResponse( network_policies=[ - vmwareengine.NetworkPolicy(), - vmwareengine.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), ], ), ) @@ -16873,7 +16917,7 @@ def test_list_network_policies_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.NetworkPolicy) for i in results) + assert all(isinstance(i, vmwareengine_resources.NetworkPolicy) for i in results) pages = list(client.list_network_policies(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -17158,7 +17202,7 @@ def 
test_create_network_policy_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), network_policy_id="network_policy_id_value", ) mock_args.update(sample_request) @@ -17195,7 +17239,7 @@ def test_create_network_policy_rest_flattened_error(transport: str = "rest"): client.create_network_policy( vmwareengine.CreateNetworkPolicyRequest(), parent="parent_value", - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), network_policy_id="network_policy_id_value", ) @@ -17476,7 +17520,7 @@ def test_update_network_policy_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -17512,7 +17556,7 @@ def test_update_network_policy_rest_flattened_error(transport: str = "rest"): with pytest.raises(ValueError): client.update_network_policy( vmwareengine.UpdateNetworkPolicyRequest(), - network_policy=vmwareengine.NetworkPolicy(name="name_value"), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -18075,7 +18119,9 @@ def test_create_vmware_engine_network_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), vmware_engine_network_id="vmware_engine_network_id_value", ) mock_args.update(sample_request) @@ -18112,7 +18158,9 @@ def test_create_vmware_engine_network_rest_flattened_error(transport: str = "res client.create_vmware_engine_network( vmwareengine.CreateVmwareEngineNetworkRequest(), parent="parent_value", - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), vmware_engine_network_id="vmware_engine_network_id_value", ) @@ -18391,7 +18439,9 @@ def test_update_vmware_engine_network_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -18427,7 +18477,9 @@ def test_update_vmware_engine_network_rest_flattened_error(transport: str = "res with pytest.raises(ValueError): client.update_vmware_engine_network( vmwareengine.UpdateVmwareEngineNetworkRequest(), - vmware_engine_network=vmwareengine.VmwareEngineNetwork(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -18744,11 +18796,11 @@ def test_get_vmware_engine_network_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.VmwareEngineNetwork( + return_value = vmwareengine_resources.VmwareEngineNetwork( name="name_value", description="description_value", - state=vmwareengine.VmwareEngineNetwork.State.CREATING, - type_=vmwareengine.VmwareEngineNetwork.Type.LEGACY, + state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, + type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, uid="uid_value", etag="etag_value", ) @@ -18756,7 +18808,7 @@ def test_get_vmware_engine_network_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.VmwareEngineNetwork.pb(return_value) + pb_return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -18764,11 +18816,11 @@ def test_get_vmware_engine_network_rest(request_type): response = client.get_vmware_engine_network(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine.VmwareEngineNetwork) + assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) assert response.name == "name_value" assert response.description == "description_value" - assert response.state == vmwareengine.VmwareEngineNetwork.State.CREATING - assert response.type_ == vmwareengine.VmwareEngineNetwork.Type.LEGACY + assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING + assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY assert response.uid == "uid_value" assert response.etag == "etag_value" @@ -18817,7 +18869,7 @@ def test_get_vmware_engine_network_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.VmwareEngineNetwork() + return_value = vmwareengine_resources.VmwareEngineNetwork() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18837,7 +18889,9 @@ def test_get_vmware_engine_network_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.VmwareEngineNetwork.pb(return_value) + pb_return_value = vmwareengine_resources.VmwareEngineNetwork.pb( + return_value + ) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -18892,8 +18946,8 @@ def test_get_vmware_engine_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.VmwareEngineNetwork.to_json( - vmwareengine.VmwareEngineNetwork() + req.return_value._content = vmwareengine_resources.VmwareEngineNetwork.to_json( + vmwareengine_resources.VmwareEngineNetwork() ) request = vmwareengine.GetVmwareEngineNetworkRequest() @@ -18902,7 +18956,7 @@ def test_get_vmware_engine_network_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.VmwareEngineNetwork() + post.return_value = vmwareengine_resources.VmwareEngineNetwork() client.get_vmware_engine_network( request, @@ -18951,7 +19005,7 @@ def test_get_vmware_engine_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.VmwareEngineNetwork() + return_value = vmwareengine_resources.VmwareEngineNetwork() # get arguments that satisfy an http rule for this method sample_request = { @@ -18967,7 +19021,7 @@ def test_get_vmware_engine_network_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = vmwareengine.VmwareEngineNetwork.pb(return_value) + pb_return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -19307,9 +19361,9 @@ def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): response = ( vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="abc", ), @@ -19319,14 +19373,14 @@ def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], next_page_token="ghi", ), vmwareengine.ListVmwareEngineNetworksResponse( vmware_engine_networks=[ - vmwareengine.VmwareEngineNetwork(), - vmwareengine.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), ], ), ) @@ -19349,7 +19403,9 @@ def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine.VmwareEngineNetwork) for i in 
results) + assert all( + isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in results + ) pages = list(client.list_vmware_engine_networks(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): diff --git a/packages/google-cloud-workstations/.OwlBot.yaml b/packages/google-cloud-workstations/.OwlBot.yaml new file mode 100644 index 000000000000..1ad29ab2066d --- /dev/null +++ b/packages/google-cloud-workstations/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/workstations/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-workstations/$1 diff --git a/packages/google-cloud-workstations/.coveragerc b/packages/google-cloud-workstations/.coveragerc new file mode 100644 index 000000000000..ee1a17c3e2ca --- /dev/null +++ b/packages/google-cloud-workstations/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/workstations/__init__.py + google/cloud/workstations/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-workstations/.flake8 b/packages/google-cloud-workstations/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-cloud-workstations/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-workstations/.gitignore b/packages/google-cloud-workstations/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-workstations/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-workstations/.repo-metadata.json b/packages/google-cloud-workstations/.repo-metadata.json new file mode 100644 index 000000000000..0f6aec78a046 --- /dev/null +++ b/packages/google-cloud-workstations/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "workstations", + "name_pretty": "Cloud Workstations API", + "api_description": "Cloud Workstations API", + "product_documentation": "https://cloud.google.com/workstations/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/workstations/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1328344", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-workstations", + "api_id": "workstations.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "workstations" +} diff --git a/packages/google-cloud-workstations/CHANGELOG.md b/packages/google-cloud-workstations/CHANGELOG.md new file mode 100644 index 000000000000..acf140c8b5ea --- /dev/null +++ b/packages/google-cloud-workstations/CHANGELOG.md @@ -0,0 +1,61 @@ +# Changelog + +## [0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-workstations-v0.3.0...google-cloud-workstations-v0.4.0) (2023-05-17) + + +### Features + +* **v1beta:** add auditd support ([78957ed](https://github.com/googleapis/google-cloud-python/commit/78957ed60b63eb3c5d26912934c77c99b288c3a7)) +* **v1beta:** add output field for the control plane IP address ([78957ed](https://github.com/googleapis/google-cloud-python/commit/78957ed60b63eb3c5d26912934c77c99b288c3a7)) +* **v1beta:** add output field for the number of pooled instances ([78957ed](https://github.com/googleapis/google-cloud-python/commit/78957ed60b63eb3c5d26912934c77c99b288c3a7)) +* **v1beta:** add support for accelerators ([78957ed](https://github.com/googleapis/google-cloud-python/commit/78957ed60b63eb3c5d26912934c77c99b288c3a7)) +* **v1beta:** add support for readiness checks ([78957ed](https://github.com/googleapis/google-cloud-python/commit/78957ed60b63eb3c5d26912934c77c99b288c3a7)) +* **v1beta:** add support for workstation-level environment variables ([78957ed](https://github.com/googleapis/google-cloud-python/commit/78957ed60b63eb3c5d26912934c77c99b288c3a7)) + + +### Documentation + +* **v1beta:** adjust documentation wording ([78957ed](https://github.com/googleapis/google-cloud-python/commit/78957ed60b63eb3c5d26912934c77c99b288c3a7)) + 
+## [0.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-workstations-v0.2.1...google-cloud-workstations-v0.3.0) (2023-05-11) + + +### Features + +* add output field for the control plane IP address ([60e16ee](https://github.com/googleapis/google-cloud-python/commit/60e16ee099ab5522563e44d307a76af4c8177288)) +* add output field for the number of pooled instances ([60e16ee](https://github.com/googleapis/google-cloud-python/commit/60e16ee099ab5522563e44d307a76af4c8177288)) + + +### Documentation + +* adjust documentation wording ([60e16ee](https://github.com/googleapis/google-cloud-python/commit/60e16ee099ab5522563e44d307a76af4c8177288)) + +## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-workstations-v0.2.0...google-cloud-workstations-v0.2.1) (2023-04-11) + + +### Documentation + +* Adjust wording around service accounts and control planes ([#11057](https://github.com/googleapis/google-cloud-python/issues/11057)) ([9f5b7c5](https://github.com/googleapis/google-cloud-python/commit/9f5b7c5a03b34fa1219c7218fa239ea6d829a949)) + +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-workstations-v0.1.1...google-cloud-workstations-v0.2.0) (2023-04-06) + + +### Features + +* add client libraries for Workstations v1 ([#11051](https://github.com/googleapis/google-cloud-python/issues/11051)) ([0113725](https://github.com/googleapis/google-cloud-python/commit/011372504f796156bc929ce414a9aa96bd73360c)) + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-workstations-v0.1.0...google-cloud-workstations-v0.1.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## 0.1.0 (2023-03-17) + + +### Features + +* add initial files for google.cloud.workstations.v1beta ([#10858](https://github.com/googleapis/google-cloud-python/issues/10858)) ([c9650f7](https://github.com/googleapis/google-cloud-python/commit/c9650f74cbf6ee04d1ccbd17947b66a0fecd4237)) + +## Changelog diff --git a/packages/google-cloud-workstations/CODE_OF_CONDUCT.md b/packages/google-cloud-workstations/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-workstations/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-workstations/CONTRIBUTING.rst b/packages/google-cloud-workstations/CONTRIBUTING.rst new file mode 100644 index 000000000000..3efdcb2c96fb --- /dev/null +++ b/packages/google-cloud-workstations/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+ +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. _nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-<python-version> -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python 3. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature.
+ +Build the docs via:: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` directory. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-workstations + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py + + +We also explicitly decided to support Python 3 beginning with version 3.7. +Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests, you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests.
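The contributing guide above leans on per-package ``noxfile.py`` sessions without showing one. As a rough sketch only (the session name, Python versions, and dependencies below are assumptions for illustration, not this repository's actual ``noxfile.py``), a minimal unit-test session might look like:

.. code-block:: python

    # Hypothetical minimal noxfile.py session, for illustration only;
    # the real sessions live in each package's noxfile.py.
    import nox

    @nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
    def unit(session):
        # Install test dependencies, then the package under test.
        session.install("pytest", "pytest-cov")
        session.install("-e", ".")
        # Run the unit suite with coverage reporting.
        session.run("pytest", "--cov=google.cloud", "tests/unit")

A session defined this way is what commands such as ``nox -s unit`` and ``nox -s unit-3.11`` invoke.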
diff --git a/packages/google-cloud-workstations/LICENSE b/packages/google-cloud-workstations/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-workstations/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-workstations/MANIFEST.in b/packages/google-cloud-workstations/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-cloud-workstations/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-workstations/README.rst b/packages/google-cloud-workstations/README.rst new file mode 100644 index 000000000000..354a34b1b556 --- /dev/null +++ b/packages/google-cloud-workstations/README.rst @@ -0,0 +1,107 @@ +Python Client for Cloud Workstations API +======================================== + +|preview| |pypi| |versions| + +`Cloud Workstations API`_: Cloud Workstations API + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-workstations.svg + :target: https://pypi.org/project/google-cloud-workstations/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-workstations.svg + :target: https://pypi.org/project/google-cloud-workstations/ +.. _Cloud Workstations API: https://cloud.google.com/workstations/docs +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/workstations/latest +.. _Product Documentation: https://cloud.google.com/workstations/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud Workstations API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud Workstations API.: https://cloud.google.com/workstations/docs +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. 
The basic problem it addresses is one of + dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install google-cloud-workstations + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install google-cloud-workstations + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud Workstations API + to see other available methods on the client. +- Read the `Cloud Workstations API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud Workstations API Product documentation: https://cloud.google.com/workstations/docs +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-workstations/SECURITY.md b/packages/google-cloud-workstations/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-workstations/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for intake, and we coordinate and disclose here using a GitHub Security Advisory to privately discuss and fix the issue.
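To complement the README's installation steps, a minimal usage sketch follows (the project, location, cluster, and config names are placeholders; the client surface shown is the generated ``workstations_v1`` module documented below):

.. code-block:: python

    # Hypothetical quick-start sketch; all resource names are placeholders.
    from google.cloud import workstations_v1

    client = workstations_v1.WorkstationsClient()
    parent = (
        "projects/my-project/locations/us-central1/"
        "workstationClusters/my-cluster/workstationConfigs/my-config"
    )
    # List the workstations under one workstation configuration.
    for workstation in client.list_workstations(parent=parent):
        print(workstation.name)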
diff --git a/packages/google-cloud-workstations/docs/CHANGELOG.md b/packages/google-cloud-workstations/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-workstations/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-workstations/docs/README.rst b/packages/google-cloud-workstations/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-workstations/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-workstations/docs/_static/custom.css b/packages/google-cloud-workstations/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-workstations/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-workstations/docs/_templates/layout.html b/packages/google-cloud-workstations/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-workstations/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document"> +    {{ sidebar() }} +    {%- block document %} +      <div class="documentwrapper"> +      {%- if render_sidebar %} +        <div class="bodywrapper">
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+          <div class="body" role="main"> +            <div class="admonition" id="python2-eol"> +              As of January 1, 2020 this library no longer supports Python 2 on the latest released version. +              Library versions released prior to that date will continue to be available. For more information please +              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. +            </div> +            {% block body %} {% endblock %} +          </div>
+ +        {%- block relbar_bottom %} +          {%- if theme_show_relbar_bottom|tobool %} + +          {%- endif %} +        {% endblock %} + +      {%- if render_sidebar %} +        </div> +      {%- endif %} +      </div> +    {%- endblock %} +    <div class="clearer"></div> +  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-workstations/docs/conf.py b/packages/google-cloud-workstations/docs/conf.py new file mode 100644 index 000000000000..c2f96079022d --- /dev/null +++ b/packages/google-cloud-workstations/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-workstations documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-workstations" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-workstations", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-workstations-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-workstations.tex", + "google-cloud-workstations Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-workstations", + "google-cloud-workstations Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-workstations", + "google-cloud-workstations Documentation", + author, + "google-cloud-workstations", + "google-cloud-workstations Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-workstations/docs/index.rst b/packages/google-cloud-workstations/docs/index.rst new file mode 100644 index 000000000000..65e3e059c9c3 --- /dev/null +++ b/packages/google-cloud-workstations/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Cloud Workstations API. +By default, you will get version ``workstations_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + workstations_v1/services + workstations_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + workstations_v1beta/services + workstations_v1beta/types + + +Changelog +--------- + +For a list of all ``google-cloud-workstations`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-workstations/docs/multiprocessing.rst b/packages/google-cloud-workstations/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-workstations/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. 
note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-workstations/docs/workstations_v1/services.rst b/packages/google-cloud-workstations/docs/workstations_v1/services.rst new file mode 100644 index 000000000000..4d34f8795a80 --- /dev/null +++ b/packages/google-cloud-workstations/docs/workstations_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Workstations v1 API +============================================= +.. toctree:: + :maxdepth: 2 + + workstations diff --git a/packages/google-cloud-workstations/docs/workstations_v1/types.rst b/packages/google-cloud-workstations/docs/workstations_v1/types.rst new file mode 100644 index 000000000000..f2d1f8aae76b --- /dev/null +++ b/packages/google-cloud-workstations/docs/workstations_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Workstations v1 API +========================================== + +.. automodule:: google.cloud.workstations_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-workstations/docs/workstations_v1/workstations.rst b/packages/google-cloud-workstations/docs/workstations_v1/workstations.rst new file mode 100644 index 000000000000..1a3ded4d683d --- /dev/null +++ b/packages/google-cloud-workstations/docs/workstations_v1/workstations.rst @@ -0,0 +1,10 @@ +Workstations +------------------------------ + +.. automodule:: google.cloud.workstations_v1.services.workstations + :members: + :inherited-members: + +.. automodule:: google.cloud.workstations_v1.services.workstations.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-workstations/docs/workstations_v1beta/services.rst b/packages/google-cloud-workstations/docs/workstations_v1beta/services.rst new file mode 100644 index 000000000000..77aed9f52d7a --- /dev/null +++ b/packages/google-cloud-workstations/docs/workstations_v1beta/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Workstations v1beta API +================================================= +.. toctree:: + :maxdepth: 2 + + workstations diff --git a/packages/google-cloud-workstations/docs/workstations_v1beta/types.rst b/packages/google-cloud-workstations/docs/workstations_v1beta/types.rst new file mode 100644 index 000000000000..af7f36fbcb60 --- /dev/null +++ b/packages/google-cloud-workstations/docs/workstations_v1beta/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Workstations v1beta API +============================================== + +.. automodule:: google.cloud.workstations_v1beta.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-workstations/docs/workstations_v1beta/workstations.rst b/packages/google-cloud-workstations/docs/workstations_v1beta/workstations.rst new file mode 100644 index 000000000000..5839880c918a --- /dev/null +++ b/packages/google-cloud-workstations/docs/workstations_v1beta/workstations.rst @@ -0,0 +1,10 @@ +Workstations +------------------------------ + +.. automodule:: google.cloud.workstations_v1beta.services.workstations + :members: + :inherited-members: + +.. 
automodule:: google.cloud.workstations_v1beta.services.workstations.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-workstations/google/cloud/workstations/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations/__init__.py new file mode 100644 index 000000000000..cf3c21dea364 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations/__init__.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.workstations import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.workstations_v1.services.workstations.async_client import ( + WorkstationsAsyncClient, +) +from google.cloud.workstations_v1.services.workstations.client import WorkstationsClient +from google.cloud.workstations_v1.types.workstations import ( + CreateWorkstationClusterRequest, + CreateWorkstationConfigRequest, + CreateWorkstationRequest, + DeleteWorkstationClusterRequest, + DeleteWorkstationConfigRequest, + DeleteWorkstationRequest, + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GetWorkstationClusterRequest, + GetWorkstationConfigRequest, + GetWorkstationRequest, + ListUsableWorkstationConfigsRequest, + ListUsableWorkstationConfigsResponse, + ListUsableWorkstationsRequest, + ListUsableWorkstationsResponse, + ListWorkstationClustersRequest, + ListWorkstationClustersResponse, + ListWorkstationConfigsRequest, + ListWorkstationConfigsResponse, + ListWorkstationsRequest, + ListWorkstationsResponse, + OperationMetadata, + StartWorkstationRequest, + StopWorkstationRequest, + UpdateWorkstationClusterRequest, + UpdateWorkstationConfigRequest, + UpdateWorkstationRequest, + Workstation, + WorkstationCluster, + WorkstationConfig, +) + +__all__ = ( + "WorkstationsClient", + "WorkstationsAsyncClient", + "CreateWorkstationClusterRequest", + "CreateWorkstationConfigRequest", + "CreateWorkstationRequest", + "DeleteWorkstationClusterRequest", + "DeleteWorkstationConfigRequest", + "DeleteWorkstationRequest", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GetWorkstationClusterRequest", + "GetWorkstationConfigRequest", + "GetWorkstationRequest", + "ListUsableWorkstationConfigsRequest", + "ListUsableWorkstationConfigsResponse", + "ListUsableWorkstationsRequest", + "ListUsableWorkstationsResponse", + "ListWorkstationClustersRequest", + "ListWorkstationClustersResponse", + "ListWorkstationConfigsRequest", + "ListWorkstationConfigsResponse", + "ListWorkstationsRequest", + "ListWorkstationsResponse", + "OperationMetadata", + "StartWorkstationRequest", + "StopWorkstationRequest", + "UpdateWorkstationClusterRequest", + "UpdateWorkstationConfigRequest", + "UpdateWorkstationRequest", + "Workstation", + "WorkstationCluster", + "WorkstationConfig", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations/gapic_version.py b/packages/google-cloud-workstations/google/cloud/workstations/gapic_version.py new 
file mode 100644 index 000000000000..c2ac4ad75f9b --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-workstations/google/cloud/workstations/py.typed b/packages/google-cloud-workstations/google/cloud/workstations/py.typed new file mode 100644 index 000000000000..04170223dff9 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-workstations package uses inline types. diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/__init__.py new file mode 100644 index 000000000000..659f7ea3e2fd --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
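The ``{x-release-please-version}`` marker lets the release tooling stamp the version string at release time, and the top-level package re-exports both it and the v1 client surface. A minimal sketch of what a consumer sees (assuming the package is installed):

.. code-block:: python

    from google.cloud import workstations, workstations_v1
    from google.cloud.workstations import gapic_version

    print(gapic_version.__version__)  # e.g. "0.4.0"

    # The versionless package aliases the default (v1) surface, so both
    # names resolve to the same class object.
    assert workstations.WorkstationsClient is workstations_v1.WorkstationsClient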
+# +from google.cloud.workstations_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.workstations import WorkstationsAsyncClient, WorkstationsClient +from .types.workstations import ( + CreateWorkstationClusterRequest, + CreateWorkstationConfigRequest, + CreateWorkstationRequest, + DeleteWorkstationClusterRequest, + DeleteWorkstationConfigRequest, + DeleteWorkstationRequest, + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GetWorkstationClusterRequest, + GetWorkstationConfigRequest, + GetWorkstationRequest, + ListUsableWorkstationConfigsRequest, + ListUsableWorkstationConfigsResponse, + ListUsableWorkstationsRequest, + ListUsableWorkstationsResponse, + ListWorkstationClustersRequest, + ListWorkstationClustersResponse, + ListWorkstationConfigsRequest, + ListWorkstationConfigsResponse, + ListWorkstationsRequest, + ListWorkstationsResponse, + OperationMetadata, + StartWorkstationRequest, + StopWorkstationRequest, + UpdateWorkstationClusterRequest, + UpdateWorkstationConfigRequest, + UpdateWorkstationRequest, + Workstation, + WorkstationCluster, + WorkstationConfig, +) + +__all__ = ( + "WorkstationsAsyncClient", + "CreateWorkstationClusterRequest", + "CreateWorkstationConfigRequest", + "CreateWorkstationRequest", + "DeleteWorkstationClusterRequest", + "DeleteWorkstationConfigRequest", + "DeleteWorkstationRequest", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GetWorkstationClusterRequest", + "GetWorkstationConfigRequest", + "GetWorkstationRequest", + "ListUsableWorkstationConfigsRequest", + "ListUsableWorkstationConfigsResponse", + "ListUsableWorkstationsRequest", + "ListUsableWorkstationsResponse", + "ListWorkstationClustersRequest", + "ListWorkstationClustersResponse", + "ListWorkstationConfigsRequest", + "ListWorkstationConfigsResponse", + "ListWorkstationsRequest", + "ListWorkstationsResponse", + "OperationMetadata", + "StartWorkstationRequest", + "StopWorkstationRequest", + "UpdateWorkstationClusterRequest", + "UpdateWorkstationConfigRequest", + "UpdateWorkstationRequest", + "Workstation", + "WorkstationCluster", + "WorkstationConfig", + "WorkstationsClient", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_metadata.json b/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_metadata.json new file mode 100644 index 000000000000..27a21c6d848b --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_metadata.json @@ -0,0 +1,328 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.workstations_v1", + "protoPackage": "google.cloud.workstations.v1", + "schema": "1.0", + "services": { + "Workstations": { + "clients": { + "grpc": { + "libraryClient": "WorkstationsClient", + "rpcs": { + "CreateWorkstation": { + "methods": [ + "create_workstation" + ] + }, + "CreateWorkstationCluster": { + "methods": [ + "create_workstation_cluster" + ] + }, + "CreateWorkstationConfig": { + "methods": [ + "create_workstation_config" + ] + }, + "DeleteWorkstation": { + "methods": [ + "delete_workstation" + ] + }, + "DeleteWorkstationCluster": { + "methods": [ + "delete_workstation_cluster" + ] + }, + "DeleteWorkstationConfig": { + "methods": [ + "delete_workstation_config" + ] + }, + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GetWorkstation": { + "methods": [ + "get_workstation" + ] + }, + 
"GetWorkstationCluster": { + "methods": [ + "get_workstation_cluster" + ] + }, + "GetWorkstationConfig": { + "methods": [ + "get_workstation_config" + ] + }, + "ListUsableWorkstationConfigs": { + "methods": [ + "list_usable_workstation_configs" + ] + }, + "ListUsableWorkstations": { + "methods": [ + "list_usable_workstations" + ] + }, + "ListWorkstationClusters": { + "methods": [ + "list_workstation_clusters" + ] + }, + "ListWorkstationConfigs": { + "methods": [ + "list_workstation_configs" + ] + }, + "ListWorkstations": { + "methods": [ + "list_workstations" + ] + }, + "StartWorkstation": { + "methods": [ + "start_workstation" + ] + }, + "StopWorkstation": { + "methods": [ + "stop_workstation" + ] + }, + "UpdateWorkstation": { + "methods": [ + "update_workstation" + ] + }, + "UpdateWorkstationCluster": { + "methods": [ + "update_workstation_cluster" + ] + }, + "UpdateWorkstationConfig": { + "methods": [ + "update_workstation_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "WorkstationsAsyncClient", + "rpcs": { + "CreateWorkstation": { + "methods": [ + "create_workstation" + ] + }, + "CreateWorkstationCluster": { + "methods": [ + "create_workstation_cluster" + ] + }, + "CreateWorkstationConfig": { + "methods": [ + "create_workstation_config" + ] + }, + "DeleteWorkstation": { + "methods": [ + "delete_workstation" + ] + }, + "DeleteWorkstationCluster": { + "methods": [ + "delete_workstation_cluster" + ] + }, + "DeleteWorkstationConfig": { + "methods": [ + "delete_workstation_config" + ] + }, + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GetWorkstation": { + "methods": [ + "get_workstation" + ] + }, + "GetWorkstationCluster": { + "methods": [ + "get_workstation_cluster" + ] + }, + "GetWorkstationConfig": { + "methods": [ + "get_workstation_config" + ] + }, + "ListUsableWorkstationConfigs": { + "methods": [ + "list_usable_workstation_configs" + ] + }, + "ListUsableWorkstations": { + "methods": [ + "list_usable_workstations" + ] + }, + "ListWorkstationClusters": { + "methods": [ + "list_workstation_clusters" + ] + }, + "ListWorkstationConfigs": { + "methods": [ + "list_workstation_configs" + ] + }, + "ListWorkstations": { + "methods": [ + "list_workstations" + ] + }, + "StartWorkstation": { + "methods": [ + "start_workstation" + ] + }, + "StopWorkstation": { + "methods": [ + "stop_workstation" + ] + }, + "UpdateWorkstation": { + "methods": [ + "update_workstation" + ] + }, + "UpdateWorkstationCluster": { + "methods": [ + "update_workstation_cluster" + ] + }, + "UpdateWorkstationConfig": { + "methods": [ + "update_workstation_config" + ] + } + } + }, + "rest": { + "libraryClient": "WorkstationsClient", + "rpcs": { + "CreateWorkstation": { + "methods": [ + "create_workstation" + ] + }, + "CreateWorkstationCluster": { + "methods": [ + "create_workstation_cluster" + ] + }, + "CreateWorkstationConfig": { + "methods": [ + "create_workstation_config" + ] + }, + "DeleteWorkstation": { + "methods": [ + "delete_workstation" + ] + }, + "DeleteWorkstationCluster": { + "methods": [ + "delete_workstation_cluster" + ] + }, + "DeleteWorkstationConfig": { + "methods": [ + "delete_workstation_config" + ] + }, + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GetWorkstation": { + "methods": [ + "get_workstation" + ] + }, + "GetWorkstationCluster": { + "methods": [ + "get_workstation_cluster" + ] + }, + "GetWorkstationConfig": { + "methods": [ + "get_workstation_config" + ] + }, + "ListUsableWorkstationConfigs": { + "methods": [ + 
"list_usable_workstation_configs" + ] + }, + "ListUsableWorkstations": { + "methods": [ + "list_usable_workstations" + ] + }, + "ListWorkstationClusters": { + "methods": [ + "list_workstation_clusters" + ] + }, + "ListWorkstationConfigs": { + "methods": [ + "list_workstation_configs" + ] + }, + "ListWorkstations": { + "methods": [ + "list_workstations" + ] + }, + "StartWorkstation": { + "methods": [ + "start_workstation" + ] + }, + "StopWorkstation": { + "methods": [ + "stop_workstation" + ] + }, + "UpdateWorkstation": { + "methods": [ + "update_workstation" + ] + }, + "UpdateWorkstationCluster": { + "methods": [ + "update_workstation_cluster" + ] + }, + "UpdateWorkstationConfig": { + "methods": [ + "update_workstation_config" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_version.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_version.py new file mode 100644 index 000000000000..c2ac4ad75f9b --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/py.typed b/packages/google-cloud-workstations/google/cloud/workstations_v1/py.typed new file mode 100644 index 000000000000..04170223dff9 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-workstations package uses inline types. diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/__init__.py new file mode 100644 index 000000000000..2d2ad17879bd --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import WorkstationsAsyncClient +from .client import WorkstationsClient + +__all__ = ( + "WorkstationsClient", + "WorkstationsAsyncClient", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/async_client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/async_client.py new file mode 100644 index 000000000000..138211a5360d --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/async_client.py @@ -0,0 +1,3227 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workstations_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.workstations_v1.services.workstations import pagers +from google.cloud.workstations_v1.types import workstations + +from .client import WorkstationsClient +from .transports.base import DEFAULT_CLIENT_INFO, WorkstationsTransport +from .transports.grpc_asyncio import WorkstationsGrpcAsyncIOTransport + + +class WorkstationsAsyncClient: + """Service for interacting with Cloud Workstations.""" + + _client: WorkstationsClient + + DEFAULT_ENDPOINT = WorkstationsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = WorkstationsClient.DEFAULT_MTLS_ENDPOINT + + workstation_path = staticmethod(WorkstationsClient.workstation_path) + parse_workstation_path = staticmethod(WorkstationsClient.parse_workstation_path) + workstation_cluster_path = staticmethod(WorkstationsClient.workstation_cluster_path) + parse_workstation_cluster_path = staticmethod( + WorkstationsClient.parse_workstation_cluster_path + ) + workstation_config_path = staticmethod(WorkstationsClient.workstation_config_path) + parse_workstation_config_path = staticmethod( + WorkstationsClient.parse_workstation_config_path + ) + common_billing_account_path = staticmethod( + WorkstationsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + WorkstationsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(WorkstationsClient.common_folder_path) + parse_common_folder_path = staticmethod(WorkstationsClient.parse_common_folder_path) + common_organization_path = staticmethod(WorkstationsClient.common_organization_path) + parse_common_organization_path = staticmethod( + WorkstationsClient.parse_common_organization_path + ) + common_project_path = staticmethod(WorkstationsClient.common_project_path) + parse_common_project_path = staticmethod( + WorkstationsClient.parse_common_project_path + ) + common_location_path = staticmethod(WorkstationsClient.common_location_path) + parse_common_location_path = staticmethod( + WorkstationsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
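The ``*_path``/``parse_*_path`` helpers delegated to ``WorkstationsClient`` build and split fully qualified resource names. A minimal sketch with hypothetical IDs (the path template shown in the comments is the conventional one for this resource):

.. code-block:: python

    from google.cloud import workstations_v1

    path = workstations_v1.WorkstationsAsyncClient.workstation_cluster_path(
        "my-project", "us-central1", "my-cluster"  # hypothetical IDs
    )
    print(path)
    # projects/my-project/locations/us-central1/workstationClusters/my-cluster

    print(workstations_v1.WorkstationsAsyncClient.parse_workstation_cluster_path(path))
    # {'project': 'my-project', 'location': 'us-central1', 'workstation_cluster': 'my-cluster'}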
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkstationsAsyncClient: The constructed client. + """ + return WorkstationsClient.from_service_account_info.__func__(WorkstationsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkstationsAsyncClient: The constructed client. + """ + return WorkstationsClient.from_service_account_file.__func__(WorkstationsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return WorkstationsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> WorkstationsTransport: + """Returns the transport used by the client instance. + + Returns: + WorkstationsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(WorkstationsClient).get_transport_class, type(WorkstationsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, WorkstationsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the workstations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.WorkstationsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = WorkstationsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_workstation_cluster( + self, + request: Optional[ + Union[workstations.GetWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationCluster: + r"""Returns the requested workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.GetWorkstationClusterRequest, dict]]): + The request object. Request message for + GetWorkstationCluster. + name (:class:`str`): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.WorkstationCluster: + A grouping of workstation + configurations and the associated + workstations in that region. + + """ + # Create or coerce a protobuf request object. 
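A minimal sketch of overriding the endpoint through ``client_options``, per the ``__init__`` docstring above (the endpoint value is an assumption; check the service's published endpoint):

.. code-block:: python

    from google.api_core.client_options import ClientOptions

    from google.cloud import workstations_v1

    options = ClientOptions(api_endpoint="workstations.googleapis.com")  # assumed endpoint
    client = workstations_v1.WorkstationsAsyncClient(client_options=options)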
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GetWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workstation_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workstation_clusters( + self, + request: Optional[ + Union[workstations.ListWorkstationClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationClustersAsyncPager: + r"""Returns all workstation clusters in the specified + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_list_workstation_clusters(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.ListWorkstationClustersRequest, dict]]): + The request object. Request message for + ListWorkstationClusters. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationClustersAsyncPager: + Response message for + ListWorkstationClusters. 
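The wrapped RPC above retries only ``ServiceUnavailable`` errors, with exponential backoff starting at 1 s, growing by a factor of 1.3 toward a 10 s cap, under a 60 s overall deadline. A caller can substitute its own policy through the ``retry`` argument; a minimal sketch, to run inside a coroutine with ``client`` an initialized ``WorkstationsAsyncClient``:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Same shape as the generated default, but with a shorter deadline.
    custom_retry = retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        deadline=30.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    )

    cluster = await client.get_workstation_cluster(
        name="name_value", retry=custom_retry
    )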
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.ListWorkstationClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workstation_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkstationClustersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_workstation_cluster( + self, + request: Optional[ + Union[workstations.CreateWorkstationClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + workstation_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.CreateWorkstationClusterRequest, dict]]): + The request object. Message for creating a + CreateWorkstationCluster. + parent (:class:`str`): + Required. Parent resource name. 
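Because the response is wrapped in ``ListWorkstationClustersAsyncPager``, iteration pulls further pages on demand. A minimal usage sketch (the parent value is hypothetical):

.. code-block:: python

    from google.cloud import workstations_v1

    async def print_cluster_names() -> None:
        client = workstations_v1.WorkstationsAsyncClient()
        pager = await client.list_workstation_clusters(
            parent="projects/my-project/locations/us-central1"  # hypothetical parent
        )
        async for cluster in pager:  # additional pages are fetched transparently
            print(cluster.name)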
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster (:class:`google.cloud.workstations_v1.types.WorkstationCluster`): + Required. Workstation cluster to + create. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster_id (:class:`str`): + Required. ID to use for the + workstation cluster. + + This corresponds to the ``workstation_cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, workstation_cluster, workstation_cluster_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.CreateWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if workstation_cluster_id is not None: + request.workstation_cluster_id = workstation_cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workstation_cluster, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_workstation_cluster( + self, + request: Optional[ + Union[workstations.UpdateWorkstationClusterRequest, dict] + ] = None, + *, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing workstation cluster. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_update_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationClusterRequest( + ) + + # Make the request + operation = client.update_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.UpdateWorkstationClusterRequest, dict]]): + The request object. Request message for + UpdateWorkstationCluster. + workstation_cluster (:class:`google.cloud.workstations_v1.types.WorkstationCluster`): + Required. Workstation cluster to + update. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask that specifies which + fields in the workstation cluster should + be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation_cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.UpdateWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_workstation_cluster, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation_cluster.name", request.workstation_cluster.name),) + ), + ) + + # Send the request. 
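A minimal sketch of a partial update driven by the ``update_mask``, run inside a coroutine with ``client`` already constructed (the resource name and field choice are hypothetical):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import workstations_v1

    cluster = workstations_v1.WorkstationCluster(
        name="projects/my-project/locations/us-central1/workstationClusters/my-cluster",
        display_name="Renamed cluster",
    )
    # Only the fields named in the mask are written.
    mask = field_mask_pb2.FieldMask(paths=["display_name"])

    operation = await client.update_workstation_cluster(
        workstation_cluster=cluster, update_mask=mask
    )
    result = await operation.result()  # resolves the long-running operation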
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_workstation_cluster( + self, + request: Optional[ + Union[workstations.DeleteWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.DeleteWorkstationClusterRequest, dict]]): + The request object. Message for deleting a workstation + cluster. + name (:class:`str`): + Required. Name of the workstation + cluster to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.DeleteWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workstation_cluster, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_workstation_config( + self, + request: Optional[Union[workstations.GetWorkstationConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationConfig: + r"""Returns the requested workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_get_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.GetWorkstationConfigRequest, dict]]): + The request object. Request message for + GetWorkstationConfig. + name (:class:`str`): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.WorkstationConfig: + A set of configuration options + describing how a workstation will be + run. Workstation configurations are + intended to be shared across multiple + workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GetWorkstationConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
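The flattened-argument guard shown in each method applies uniformly: passing a full ``request`` object together with any flattened field raises ``ValueError``. A minimal sketch, inside a coroutine with ``client`` already constructed:

.. code-block:: python

    from google.cloud import workstations_v1

    request = workstations_v1.GetWorkstationConfigRequest(name="name_value")

    try:
        # Mixing the request object with the flattened ``name`` field is rejected.
        await client.get_workstation_config(request=request, name="other_name")
    except ValueError as exc:
        print(exc)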
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workstation_config, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workstation_configs( + self, + request: Optional[ + Union[workstations.ListWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationConfigsAsyncPager: + r"""Returns all workstation configurations in the + specified cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_list_workstation_configs(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.ListWorkstationConfigsRequest, dict]]): + The request object. Request message for + ListWorkstationConfigs. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationConfigsAsyncPager: + Response message for + ListWorkstationConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = workstations.ListWorkstationConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkstationConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_usable_workstation_configs( + self, + request: Optional[ + Union[workstations.ListUsableWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableWorkstationConfigsAsyncPager: + r"""Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_list_usable_workstation_configs(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.ListUsableWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstation_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.ListUsableWorkstationConfigsRequest, dict]]): + The request object. Request message for + ListUsableWorkstationConfigs. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationConfigsAsyncPager: + Response message for + ListUsableWorkstationConfigs. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.ListUsableWorkstationConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_usable_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUsableWorkstationConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_workstation_config( + self, + request: Optional[ + Union[workstations.CreateWorkstationConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + workstation_config: Optional[workstations.WorkstationConfig] = None, + workstation_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_create_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationConfigRequest( + parent="parent_value", + workstation_config_id="workstation_config_id_value", + ) + + # Make the request + operation = client.create_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.CreateWorkstationConfigRequest, dict]]): + The request object. Message for creating a + CreateWorkstationConfig. + parent (:class:`str`): + Required. Parent resource name. 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_config (:class:`google.cloud.workstations_v1.types.WorkstationConfig`): + Required. Config to create. + This corresponds to the ``workstation_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_config_id (:class:`str`): + Required. ID to use for the + workstation configuration. + + This corresponds to the ``workstation_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation_config, workstation_config_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.CreateWorkstationConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_config is not None: + request.workstation_config = workstation_config + if workstation_config_id is not None: + request.workstation_config_id = workstation_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workstation_config, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_workstation_config( + self, + request: Optional[ + Union[workstations.UpdateWorkstationConfigRequest, dict] + ] = None, + *, + workstation_config: Optional[workstations.WorkstationConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing workstation configuration. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_update_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationConfigRequest( + ) + + # Make the request + operation = client.update_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.UpdateWorkstationConfigRequest, dict]]): + The request object. Request message for + UpdateWorkstationConfig. + workstation_config (:class:`google.cloud.workstations_v1.types.WorkstationConfig`): + Required. Config to update. + This corresponds to the ``workstation_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask specifying which + fields in the workstation configuration + should be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.UpdateWorkstationConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation_config is not None: + request.workstation_config = workstation_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_workstation_config, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation_config.name", request.workstation_config.name),) + ), + ) + + # Send the request. 
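+ # The awaited call resolves to a raw google.longrunning Operation
+ # message; it is wrapped into an AsyncOperation future below.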
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_workstation_config( + self, + request: Optional[ + Union[workstations.DeleteWorkstationConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_delete_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.DeleteWorkstationConfigRequest, dict]]): + The request object. Message for deleting a workstation + configuration. + name (:class:`str`): + Required. Name of the workstation + configuration to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.DeleteWorkstationConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
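+ # `gapic_v1.method.DEFAULT` is a sentinel: when the caller leaves
+ # `retry` or `timeout` at that value, the defaults configured in
+ # `wrap_method` below are used instead.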
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workstation_config, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_workstation( + self, + request: Optional[Union[workstations.GetWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.Workstation: + r"""Returns the requested workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_get_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.GetWorkstationRequest, dict]]): + The request object. Request message for GetWorkstation. + name (:class:`str`): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.Workstation: + A single instance of a developer + workstation with its own persistent + storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GetWorkstationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
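+ # Read-only RPCs such as this one carry a default retry policy:
+ # transient ServiceUnavailable errors are retried with exponential
+ # backoff (1.0s initial delay, multiplier 1.3, capped at 10s, with a
+ # 60s overall deadline).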
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workstation, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workstations( + self, + request: Optional[Union[workstations.ListWorkstationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationsAsyncPager: + r"""Returns all Workstations using the specified + workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_list_workstations(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.ListWorkstationsRequest, dict]]): + The request object. Request message for ListWorkstations. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationsAsyncPager: + Response message for + ListWorkstations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.ListWorkstationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
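+ # The wrapped `rpc` is also handed to the pager constructed further
+ # below, which reuses it (with the same metadata) to fetch subsequent
+ # pages on demand.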
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workstations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkstationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_usable_workstations( + self, + request: Optional[ + Union[workstations.ListUsableWorkstationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableWorkstationsAsyncPager: + r"""Returns all workstations using the specified + workstation configuration on which the caller has the + "workstations.workstations.use" permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_list_usable_workstations(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.ListUsableWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.ListUsableWorkstationsRequest, dict]]): + The request object. Request message for + ListUsableWorkstations. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationsAsyncPager: + Response message for + ListUsableWorkstations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
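+ # GAPIC convention: callers may pass either a fully formed request
+ # object or the individual flattened fields (here, `parent`), but
+ # never both.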
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.ListUsableWorkstationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_usable_workstations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUsableWorkstationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_workstation( + self, + request: Optional[Union[workstations.CreateWorkstationRequest, dict]] = None, + *, + parent: Optional[str] = None, + workstation: Optional[workstations.Workstation] = None, + workstation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_create_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationRequest( + parent="parent_value", + workstation_id="workstation_id_value", + ) + + # Make the request + operation = client.create_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.CreateWorkstationRequest, dict]]): + The request object. Message for creating a + CreateWorkstation. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation (:class:`google.cloud.workstations_v1.types.Workstation`): + Required. Workstation to create. + This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ workstation_id (:class:`str`): + Required. ID to use for the + workstation. + + This corresponds to the ``workstation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation, workstation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.CreateWorkstationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation is not None: + request.workstation = workstation + if workstation_id is not None: + request.workstation_id = workstation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workstation, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_workstation( + self, + request: Optional[Union[workstations.UpdateWorkstationRequest, dict]] = None, + *, + workstation: Optional[workstations.Workstation] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import workstations_v1
+
+ async def sample_update_workstation():
+ # Create a client
+ client = workstations_v1.WorkstationsAsyncClient()
+
+ # Initialize request argument(s)
+ request = workstations_v1.UpdateWorkstationRequest(
+ )
+
+ # Make the request
+ operation = client.update_workstation(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.workstations_v1.types.UpdateWorkstationRequest, dict]]):
+ The request object. Request message for
+ UpdateWorkstation.
+ workstation (:class:`google.cloud.workstations_v1.types.Workstation`):
+ Required. Workstation to update.
+ This corresponds to the ``workstation`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+ Required. Mask specifying which
+ fields in the workstation
+ should be updated.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.workstations_v1.types.Workstation`
+ A single instance of a developer workstation with its
+ own persistent storage.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([workstation, update_mask])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = workstations.UpdateWorkstationRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if workstation is not None:
+ request.workstation = workstation
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_workstation,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("workstation.name", request.workstation.name),)
+ ),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ workstations.Workstation,
+ metadata_type=workstations.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response + + async def delete_workstation( + self, + request: Optional[Union[workstations.DeleteWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_delete_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.DeleteWorkstationRequest, dict]]): + The request object. Request message for + DeleteWorkstation. + name (:class:`str`): + Required. Name of the workstation to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.DeleteWorkstationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workstation, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
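+ # `from_gapic` converts the raw Operation proto into an AsyncOperation
+ # whose `result()` coroutine, when awaited, resolves to the final
+ # Workstation message and whose `metadata` is decoded as
+ # OperationMetadata.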
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def start_workstation( + self, + request: Optional[Union[workstations.StartWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts running a workstation so that users can + connect to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_start_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.StartWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.start_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.StartWorkstationRequest, dict]]): + The request object. Request message for StartWorkstation. + name (:class:`str`): + Required. Name of the workstation to + start. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.StartWorkstationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_workstation, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def stop_workstation( + self, + request: Optional[Union[workstations.StopWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Stops running a workstation, reducing costs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_stop_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.StopWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.stop_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.StopWorkstationRequest, dict]]): + The request object. Request message for StopWorkstation. + name (:class:`str`): + Required. Name of the workstation to + stop. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.StopWorkstationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_workstation, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
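+ # `to_grpc_metadata` renders these parameters as the
+ # `x-goog-request-params` request header, which the service uses to
+ # route the call to the region that owns the named resource.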
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def generate_access_token( + self, + request: Optional[Union[workstations.GenerateAccessTokenRequest, dict]] = None, + *, + workstation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.GenerateAccessTokenResponse: + r"""Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + async def sample_generate_access_token(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = await client.generate_access_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1.types.GenerateAccessTokenRequest, dict]]): + The request object. Request message for + GenerateAccessToken. + workstation (:class:`str`): + Required. Name of the workstation for + which the access token should be + generated. + + This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.GenerateAccessTokenResponse: + Response message for + GenerateAccessToken. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GenerateAccessTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation is not None: + request.workstation = workstation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
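+ # Like the read-only RPCs above, this call carries a default retry on
+ # transient ServiceUnavailable errors.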
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_access_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation", request.workstation),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
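+ # (operations_pb2 messages are plain protobuf classes rather than the
+ # proto-plus types used elsewhere in this client, so a dict request
+ # can only be converted by expanding it into keyword arguments.)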
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+        if isinstance(request, dict):
+            request = operations_pb2.CancelOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.cancel_operation,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def set_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the IAM access control policy on the specified function.
+
+        Replaces any existing policy.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+                The request object. Request message for `SetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+ if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("WorkstationsAsyncClient",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/client.py new file mode 100644 index 000000000000..370fd263b704 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/client.py @@ -0,0 +1,3442 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workstations_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.workstations_v1.services.workstations import pagers +from google.cloud.workstations_v1.types import workstations + +from .transports.base import DEFAULT_CLIENT_INFO, WorkstationsTransport +from .transports.grpc import WorkstationsGrpcTransport +from .transports.grpc_asyncio import WorkstationsGrpcAsyncIOTransport +from .transports.rest import WorkstationsRestTransport + + +class WorkstationsClientMeta(type): + """Metaclass for the Workstations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[WorkstationsTransport]] + _transport_registry["grpc"] = WorkstationsGrpcTransport + _transport_registry["grpc_asyncio"] = WorkstationsGrpcAsyncIOTransport + _transport_registry["rest"] = WorkstationsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[WorkstationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
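+        # Illustrative sketch (not part of the generated code): given the
+        # registry populated above ("grpc", "grpc_asyncio", "rest"), the
+        # lookup behaves like:
+        #
+        #     WorkstationsClient.get_transport_class("rest")  # -> WorkstationsRestTransport
+        #     WorkstationsClient.get_transport_class()        # -> WorkstationsGrpcTransport,
+        #                                                     #    the first registered entry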
+        return next(iter(cls._transport_registry.values()))
+
+
+class WorkstationsClient(metaclass=WorkstationsClientMeta):
+    """Service for interacting with Cloud Workstations."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "workstations.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            WorkstationsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            WorkstationsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> WorkstationsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            WorkstationsTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def workstation_path( + project: str, + location: str, + workstation_cluster: str, + workstation_config: str, + workstation: str, + ) -> str: + """Returns a fully-qualified workstation string.""" + return "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}/workstations/{workstation}".format( + project=project, + location=location, + workstation_cluster=workstation_cluster, + workstation_config=workstation_config, + workstation=workstation, + ) + + @staticmethod + def parse_workstation_path(path: str) -> Dict[str, str]: + """Parses a workstation path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workstationClusters/(?P.+?)/workstationConfigs/(?P.+?)/workstations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def workstation_cluster_path( + project: str, + location: str, + workstation_cluster: str, + ) -> str: + """Returns a fully-qualified workstation_cluster string.""" + return "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}".format( + project=project, + location=location, + workstation_cluster=workstation_cluster, + ) + + @staticmethod + def parse_workstation_cluster_path(path: str) -> Dict[str, str]: + """Parses a workstation_cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workstationClusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def workstation_config_path( + project: str, + location: str, + workstation_cluster: str, + workstation_config: str, + ) -> str: + """Returns a fully-qualified workstation_config string.""" + return "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}".format( + project=project, + location=location, + workstation_cluster=workstation_cluster, + workstation_config=workstation_config, + ) + + @staticmethod + def parse_workstation_config_path(path: str) -> Dict[str, str]: + """Parses a workstation_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workstationClusters/(?P.+?)/workstationConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization 
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
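+        # Illustrative decision table (assuming client_options.api_endpoint is
+        # not set, which otherwise takes precedence):
+        #
+        #     GOOGLE_API_USE_MTLS_ENDPOINT | client cert found | endpoint used
+        #     "always"                     | any               | DEFAULT_MTLS_ENDPOINT
+        #     "auto"                       | yes               | DEFAULT_MTLS_ENDPOINT
+        #     "auto"                       | no                | DEFAULT_ENDPOINT
+        #     "never"                      | any               | DEFAULT_ENDPOINT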
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, WorkstationsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the workstations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, WorkstationsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, WorkstationsTransport): + # transport is a WorkstationsTransport instance. 
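+            # Illustrative sketch (hypothetical variable names): a pre-built
+            # transport carries its own credentials, e.g.:
+            #
+            #     transport = WorkstationsGrpcTransport(credentials=creds)
+            #     client = WorkstationsClient(transport=transport)
+            #
+            # Supplying `credentials=` (or `credentials_file`/`api_key`) to the
+            # client as well is rejected by the checks below.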
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_workstation_cluster( + self, + request: Optional[ + Union[workstations.GetWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationCluster: + r"""Returns the requested workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.GetWorkstationClusterRequest, dict]): + The request object. Request message for + GetWorkstationCluster. + name (str): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.WorkstationCluster: + A grouping of workstation + configurations and the associated + workstations in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GetWorkstationClusterRequest. 
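+        # Illustrative contrast (hypothetical resource name):
+        #
+        #     client.get_workstation_cluster(
+        #         name="projects/p/locations/l/workstationClusters/c")   # ok
+        #     client.get_workstation_cluster(
+        #         request={"name": "projects/p/..."})                    # ok
+        #     client.get_workstation_cluster(
+        #         request={"name": "..."}, name="...")                   # ValueError above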
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.GetWorkstationClusterRequest): + request = workstations.GetWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workstation_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workstation_clusters( + self, + request: Optional[ + Union[workstations.ListWorkstationClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationClustersPager: + r"""Returns all workstation clusters in the specified + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_list_workstation_clusters(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.ListWorkstationClustersRequest, dict]): + The request object. Request message for + ListWorkstationClusters. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationClustersPager: + Response message for + ListWorkstationClusters. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListWorkstationClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListWorkstationClustersRequest): + request = workstations.ListWorkstationClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_workstation_clusters + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkstationClustersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_workstation_cluster( + self, + request: Optional[ + Union[workstations.CreateWorkstationClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + workstation_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.CreateWorkstationClusterRequest, dict]): + The request object. Message for creating a + CreateWorkstationCluster. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster (google.cloud.workstations_v1.types.WorkstationCluster): + Required. Workstation cluster to + create. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster_id (str): + Required. ID to use for the + workstation cluster. 
+ + This corresponds to the ``workstation_cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, workstation_cluster, workstation_cluster_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.CreateWorkstationClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.CreateWorkstationClusterRequest): + request = workstations.CreateWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if workstation_cluster_id is not None: + request.workstation_cluster_id = workstation_cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_workstation_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_workstation_cluster( + self, + request: Optional[ + Union[workstations.UpdateWorkstationClusterRequest, dict] + ] = None, + *, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_update_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationClusterRequest( + ) + + # Make the request + operation = client.update_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.UpdateWorkstationClusterRequest, dict]): + The request object. Request message for + UpdateWorkstationCluster. + workstation_cluster (google.cloud.workstations_v1.types.WorkstationCluster): + Required. Workstation cluster to + update. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask that specifies which + fields in the workstation cluster should + be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation_cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.UpdateWorkstationClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.UpdateWorkstationClusterRequest): + request = workstations.UpdateWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_workstation_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation_cluster.name", request.workstation_cluster.name),) + ), + ) + + # Send the request. 
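+        # Note (illustrative): the routing header assembled above is transmitted
+        # as the "x-goog-request-params" request header, e.g. for a cluster named
+        # "projects/p/locations/l/workstationClusters/c" (hypothetical) roughly:
+        #
+        #     x-goog-request-params:
+        #         workstation_cluster.name=projects%2Fp%2Flocations%2Fl%2F...
+        #
+        # which lets the service route the update to the right regional backend.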
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_workstation_cluster( + self, + request: Optional[ + Union[workstations.DeleteWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes the specified workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.DeleteWorkstationClusterRequest, dict]): + The request object. Message for deleting a workstation + cluster. + name (str): + Required. Name of the workstation + cluster to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.DeleteWorkstationClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.DeleteWorkstationClusterRequest): + request = workstations.DeleteWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
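+        # Illustrative note: deletion, like the other mutating RPCs here, is a
+        # long-running operation; the future returned below can be blocked on,
+        # e.g. (hypothetical timeout value):
+        #
+        #     op = client.delete_workstation_cluster(name=cluster_name)
+        #     deleted = op.result(timeout=300)  # a workstations.WorkstationCluster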
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_workstation_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_workstation_config( + self, + request: Optional[Union[workstations.GetWorkstationConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationConfig: + r"""Returns the requested workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_get_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.GetWorkstationConfigRequest, dict]): + The request object. Request message for + GetWorkstationConfig. + name (str): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.WorkstationConfig: + A set of configuration options + describing how a workstation will be + run. Workstation configurations are + intended to be shared across multiple + workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GetWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, workstations.GetWorkstationConfigRequest): + request = workstations.GetWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workstation_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workstation_configs( + self, + request: Optional[ + Union[workstations.ListWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationConfigsPager: + r"""Returns all workstation configurations in the + specified cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_list_workstation_configs(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.ListWorkstationConfigsRequest, dict]): + The request object. Request message for + ListWorkstationConfigs. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationConfigsPager: + Response message for + ListWorkstationConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListWorkstationConfigsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListWorkstationConfigsRequest): + request = workstations.ListWorkstationConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workstation_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkstationConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_usable_workstation_configs( + self, + request: Optional[ + Union[workstations.ListUsableWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableWorkstationConfigsPager: + r"""Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_list_usable_workstation_configs(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListUsableWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.ListUsableWorkstationConfigsRequest, dict]): + The request object. Request message for + ListUsableWorkstationConfigs. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationConfigsPager: + Response message for + ListUsableWorkstationConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
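+        # The flattened `parent` keyword and the `request` argument are
+        # mutually exclusive: pass one or the other. Supplying both raises
+        # the ValueError below.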
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a workstations.ListUsableWorkstationConfigsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, workstations.ListUsableWorkstationConfigsRequest):
+            request = workstations.ListUsableWorkstationConfigsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.list_usable_workstation_configs
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListUsableWorkstationConfigsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_workstation_config(
+        self,
+        request: Optional[
+            Union[workstations.CreateWorkstationConfigRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        workstation_config: Optional[workstations.WorkstationConfig] = None,
+        workstation_config_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Creates a new workstation configuration.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1
+
+            def sample_create_workstation_config():
+                # Create a client
+                client = workstations_v1.WorkstationsClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1.CreateWorkstationConfigRequest(
+                    parent="parent_value",
+                    workstation_config_id="workstation_config_id_value",
+                )
+
+                # Make the request
+                operation = client.create_workstation_config(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.workstations_v1.types.CreateWorkstationConfigRequest, dict]):
+                The request object. Request message for creating a
+                workstation configuration.
+            parent (str):
+                Required. Parent resource name.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ workstation_config (google.cloud.workstations_v1.types.WorkstationConfig): + Required. Config to create. + This corresponds to the ``workstation_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_config_id (str): + Required. ID to use for the + workstation configuration. + + This corresponds to the ``workstation_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation_config, workstation_config_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.CreateWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.CreateWorkstationConfigRequest): + request = workstations.CreateWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_config is not None: + request.workstation_config = workstation_config + if workstation_config_id is not None: + request.workstation_config_id = workstation_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_workstation_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_workstation_config( + self, + request: Optional[ + Union[workstations.UpdateWorkstationConfigRequest, dict] + ] = None, + *, + workstation_config: Optional[workstations.WorkstationConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing workstation configuration. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_update_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationConfigRequest( + ) + + # Make the request + operation = client.update_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.UpdateWorkstationConfigRequest, dict]): + The request object. Request message for + UpdateWorkstationConfig. + workstation_config (google.cloud.workstations_v1.types.WorkstationConfig): + Required. Config to update. + This corresponds to the ``workstation_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask specifying which + fields in the workstation configuration + should be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.UpdateWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.UpdateWorkstationConfigRequest): + request = workstations.UpdateWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation_config is not None: + request.workstation_config = workstation_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_workstation_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
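+        # The routing header is sent as an `x-goog-request-params` metadata
+        # entry, which the backend uses to route the call by
+        # `workstation_config.name`.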
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation_config.name", request.workstation_config.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_workstation_config( + self, + request: Optional[ + Union[workstations.DeleteWorkstationConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes the specified workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_delete_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.DeleteWorkstationConfigRequest, dict]): + The request object. Message for deleting a workstation + configuration. + name (str): + Required. Name of the workstation + configuration to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.DeleteWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
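+        # Only `name` is exposed as a flattened argument here; any other
+        # fields on DeleteWorkstationConfigRequest must be set on the
+        # `request` object itself.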
+ if not isinstance(request, workstations.DeleteWorkstationConfigRequest): + request = workstations.DeleteWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_workstation_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_workstation( + self, + request: Optional[Union[workstations.GetWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.Workstation: + r"""Returns the requested workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_get_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.GetWorkstationRequest, dict]): + The request object. Request message for GetWorkstation. + name (str): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.Workstation: + A single instance of a developer + workstation with its own persistent + storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GetWorkstationRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.GetWorkstationRequest): + request = workstations.GetWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workstations( + self, + request: Optional[Union[workstations.ListWorkstationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationsPager: + r"""Returns all Workstations using the specified + workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_list_workstations(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.ListWorkstationsRequest, dict]): + The request object. Request message for ListWorkstations. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationsPager: + Response message for + ListWorkstations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListWorkstationsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListWorkstationsRequest): + request = workstations.ListWorkstationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workstations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkstationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_usable_workstations( + self, + request: Optional[ + Union[workstations.ListUsableWorkstationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableWorkstationsPager: + r"""Returns all workstations using the specified + workstation configuration on which the caller has the + "workstations.workstations.use" permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_list_usable_workstations(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListUsableWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.ListUsableWorkstationsRequest, dict]): + The request object. Request message for + ListUsableWorkstations. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationsPager: + Response message for + ListUsableWorkstations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
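+        # `parent` names the workstation configuration whose workstations are
+        # listed, typically of the form (shown for orientation; the exact
+        # format is defined by the API)
+        # projects/*/locations/*/workstationClusters/*/workstationConfigs/*.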
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a workstations.ListUsableWorkstationsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, workstations.ListUsableWorkstationsRequest):
+            request = workstations.ListUsableWorkstationsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_usable_workstations]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListUsableWorkstationsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_workstation(
+        self,
+        request: Optional[Union[workstations.CreateWorkstationRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        workstation: Optional[workstations.Workstation] = None,
+        workstation_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Creates a new workstation.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1
+
+            def sample_create_workstation():
+                # Create a client
+                client = workstations_v1.WorkstationsClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1.CreateWorkstationRequest(
+                    parent="parent_value",
+                    workstation_id="workstation_id_value",
+                )
+
+                # Make the request
+                operation = client.create_workstation(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.workstations_v1.types.CreateWorkstationRequest, dict]):
+                The request object. Request message for creating a
+                workstation.
+            parent (str):
+                Required. Parent resource name.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            workstation (google.cloud.workstations_v1.types.Workstation):
+                Required. Workstation to create.
+ This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_id (str): + Required. ID to use for the + workstation. + + This corresponds to the ``workstation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation, workstation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.CreateWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.CreateWorkstationRequest): + request = workstations.CreateWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation is not None: + request.workstation = workstation + if workstation_id is not None: + request.workstation_id = workstation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_workstation( + self, + request: Optional[Union[workstations.UpdateWorkstationRequest, dict]] = None, + *, + workstation: Optional[workstations.Workstation] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_update_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationRequest( + ) + + # Make the request + operation = client.update_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.UpdateWorkstationRequest, dict]): + The request object. Request message for + UpdateWorkstation. + workstation (google.cloud.workstations_v1.types.Workstation): + Required. Workstation to update. + This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask specifying which + fields in the workstation configuration + should be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.UpdateWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.UpdateWorkstationRequest): + request = workstations.UpdateWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation is not None: + request.workstation = workstation + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation.name", request.workstation.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
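+        # The raw RPC response is a google.longrunning.Operation;
+        # operation.from_gapic wraps it in a future whose .result() blocks
+        # until the server finishes and then yields the updated Workstation.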
+ response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_workstation( + self, + request: Optional[Union[workstations.DeleteWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes the specified workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_delete_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.DeleteWorkstationRequest, dict]): + The request object. Request message for + DeleteWorkstation. + name (str): + Required. Name of the workstation to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.DeleteWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.DeleteWorkstationRequest): + request = workstations.DeleteWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. 
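+        # This produces an ("x-goog-request-params", "name=<URL-encoded
+        # resource name>") entry alongside any caller-supplied metadata.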
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def start_workstation( + self, + request: Optional[Union[workstations.StartWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts running a workstation so that users can + connect to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_start_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.StartWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.start_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.StartWorkstationRequest, dict]): + The request object. Request message for StartWorkstation. + name (str): + Required. Name of the workstation to + start. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.StartWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.StartWorkstationRequest): + request = workstations.StartWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
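+        # (The mutual-exclusion check above guarantees this branch only
+        # applies when the caller did not pass a `request` object.)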
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def stop_workstation( + self, + request: Optional[Union[workstations.StopWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops running a workstation, reducing costs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_stop_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.StopWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.stop_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.StopWorkstationRequest, dict]): + The request object. Request message for StopWorkstation. + name (str): + Required. Name of the workstation to + stop. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.StopWorkstationRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.StopWorkstationRequest): + request = workstations.StopWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_access_token( + self, + request: Optional[Union[workstations.GenerateAccessTokenRequest, dict]] = None, + *, + workstation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.GenerateAccessTokenResponse: + r"""Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1 + + def sample_generate_access_token(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = client.generate_access_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1.types.GenerateAccessTokenRequest, dict]): + The request object. Request message for + GenerateAccessToken. + workstation (str): + Required. Name of the workstation for + which the access token should be + generated. + + This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1.types.GenerateAccessTokenResponse: + Response message for + GenerateAccessToken. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
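+        # Note that any([...]) treats a falsy value (such as "") as absent,
+        # so callers should pass a fully qualified workstation name.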
+ has_flattened_params = any([workstation]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GenerateAccessTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.GenerateAccessTokenRequest): + request = workstations.GenerateAccessTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation is not None: + request.workstation = workstation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_access_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation", request.workstation),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "WorkstationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
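+        # Unlike the workstation list methods above, this returns the raw
+        # ListOperationsResponse rather than a pager; callers page manually
+        # via its next_page_token.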
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
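+        # The result (google.protobuf.Empty) is intentionally discarded;
+        # this method returns None.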
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
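+        # Editorial sketch, not generated code: a typical permission probe
+        # through this method, using hypothetical resource and permission
+        # strings; the wrapping described above happens just below:
+        #
+        #   resp = client.test_iam_permissions({
+        #       "resource": workstation_name,
+        #       "permissions": ["workstations.workstations.use"],
+        #   })
+        #   allowed = set(resp.permissions)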
+ rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("WorkstationsClient",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/pagers.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/pagers.py new file mode 100644 index 000000000000..f4aedb6919aa --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/pagers.py @@ -0,0 +1,673 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.workstations_v1.types import workstations + + +class ListWorkstationClustersPager: + """A pager for iterating through ``list_workstation_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListWorkstationClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstation_clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkstationClusters`` requests and continue to iterate + through the ``workstation_clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListWorkstationClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListWorkstationClustersResponse], + request: workstations.ListWorkstationClustersRequest, + response: workstations.ListWorkstationClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListWorkstationClustersRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListWorkstationClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
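+
+        Editorial note, not generated text: callers normally receive this
+        pager from ``WorkstationsClient.list_workstation_clusters`` rather
+        than building it directly; plain iteration fetches pages lazily::
+
+            for cluster in client.list_workstation_clusters(parent=parent):
+                print(cluster.name)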
+ """ + self._method = method + self._request = workstations.ListWorkstationClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListWorkstationClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.WorkstationCluster]: + for page in self.pages: + yield from page.workstation_clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationClustersAsyncPager: + """A pager for iterating through ``list_workstation_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListWorkstationClustersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstation_clusters`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkstationClusters`` requests and continue to iterate + through the ``workstation_clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListWorkstationClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListWorkstationClustersResponse]], + request: workstations.ListWorkstationClustersRequest, + response: workstations.ListWorkstationClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListWorkstationClustersRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListWorkstationClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[workstations.ListWorkstationClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.WorkstationCluster]: + async def async_generator(): + async for page in self.pages: + for response in page.workstation_clusters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationConfigsPager: + """A pager for iterating through ``list_workstation_configs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListWorkstationConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListWorkstationConfigsResponse], + request: workstations.ListWorkstationConfigsRequest, + response: workstations.ListWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListWorkstationConfigsRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.WorkstationConfig]: + for page in self.pages: + yield from page.workstation_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationConfigsAsyncPager: + """A pager for iterating through ``list_workstation_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListWorkstationConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListWorkstationConfigsResponse]], + request: workstations.ListWorkstationConfigsRequest, + response: workstations.ListWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListWorkstationConfigsRequest): + The initial request object. 
+ response (google.cloud.workstations_v1.types.ListWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[workstations.ListWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.WorkstationConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.workstation_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationConfigsPager: + """A pager for iterating through ``list_usable_workstation_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListUsableWorkstationConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsableWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListUsableWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListUsableWorkstationConfigsResponse], + request: workstations.ListUsableWorkstationConfigsRequest, + response: workstations.ListUsableWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListUsableWorkstationConfigsRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListUsableWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
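+
+        Editorial note, not generated text: this pager is ordinarily produced
+        by ``WorkstationsClient.list_usable_workstation_configs``; its
+        ``pages`` property exposes whole responses instead of items::
+
+            pager = client.list_usable_workstation_configs(parent=parent)
+            for page in pager.pages:
+                print(len(page.workstation_configs))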
+ """ + self._method = method + self._request = workstations.ListUsableWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListUsableWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.WorkstationConfig]: + for page in self.pages: + yield from page.workstation_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationConfigsAsyncPager: + """A pager for iterating through ``list_usable_workstation_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListUsableWorkstationConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUsableWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListUsableWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[workstations.ListUsableWorkstationConfigsResponse] + ], + request: workstations.ListUsableWorkstationConfigsRequest, + response: workstations.ListUsableWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListUsableWorkstationConfigsRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListUsableWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListUsableWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[workstations.ListUsableWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.WorkstationConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.workstation_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationsPager: + """A pager for iterating through ``list_workstations`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListWorkstationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListWorkstationsResponse], + request: workstations.ListWorkstationsRequest, + response: workstations.ListWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListWorkstationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.Workstation]: + for page in self.pages: + yield from page.workstations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationsAsyncPager: + """A pager for iterating through ``list_workstations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListWorkstationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListWorkstationsResponse]], + request: workstations.ListWorkstationsRequest, + response: workstations.ListWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListWorkstationsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[workstations.ListWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.Workstation]: + async def async_generator(): + async for page in self.pages: + for response in page.workstations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationsPager: + """A pager for iterating through ``list_usable_workstations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListUsableWorkstationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsableWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListUsableWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListUsableWorkstationsResponse], + request: workstations.ListUsableWorkstationsRequest, + response: workstations.ListUsableWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListUsableWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListUsableWorkstationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListUsableWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListUsableWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.Workstation]: + for page in self.pages: + yield from page.workstations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationsAsyncPager: + """A pager for iterating through ``list_usable_workstations`` requests. 
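+
+    Editorial note, not generated text: with the async client this pager is
+    consumed via ``async for``, e.g.::
+
+        pager = await client.list_usable_workstations(parent=parent)
+        async for workstation in pager:
+            print(workstation.name)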
+ + This class thinly wraps an initial + :class:`google.cloud.workstations_v1.types.ListUsableWorkstationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUsableWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1.types.ListUsableWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListUsableWorkstationsResponse]], + request: workstations.ListUsableWorkstationsRequest, + response: workstations.ListUsableWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1.types.ListUsableWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1.types.ListUsableWorkstationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListUsableWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[workstations.ListUsableWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.Workstation]: + async def async_generator(): + async for page in self.pages: + for response in page.workstations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/__init__.py new file mode 100644 index 000000000000..9facbb0989aa --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
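+#
+# Editorial note, not generated code: the registry assembled below is what
+# lets a caller choose a transport by name, for example:
+#
+#   client = WorkstationsClient(transport="rest")
+#
+# "grpc" remains the default when no transport is specified.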
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import WorkstationsTransport +from .grpc import WorkstationsGrpcTransport +from .grpc_asyncio import WorkstationsGrpcAsyncIOTransport +from .rest import WorkstationsRestInterceptor, WorkstationsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[WorkstationsTransport]] +_transport_registry["grpc"] = WorkstationsGrpcTransport +_transport_registry["grpc_asyncio"] = WorkstationsGrpcAsyncIOTransport +_transport_registry["rest"] = WorkstationsRestTransport + +__all__ = ( + "WorkstationsTransport", + "WorkstationsGrpcTransport", + "WorkstationsGrpcAsyncIOTransport", + "WorkstationsRestTransport", + "WorkstationsRestInterceptor", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/base.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/base.py new file mode 100644 index 000000000000..a7aaf8493a39 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/base.py @@ -0,0 +1,596 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workstations_v1 import gapic_version as package_version +from google.cloud.workstations_v1.types import workstations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class WorkstationsTransport(abc.ABC): + """Abstract transport class for Workstations.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "workstations.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
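+        # Editorial sketch, not generated code: each Retry below is plain
+        # exponential backoff, roughly
+        #
+        #   delay_n = min(initial * multiplier ** n, maximum)  # 1.0s, 1.3s, 1.69s, ..., capped at 10.0s
+        #
+        # retried only for ServiceUnavailable and abandoned once the
+        # 60-second deadline elapses.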
+ self._wrapped_methods = { + self.get_workstation_cluster: gapic_v1.method.wrap_method( + self.get_workstation_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_workstation_clusters: gapic_v1.method.wrap_method( + self.list_workstation_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_workstation_cluster: gapic_v1.method.wrap_method( + self.create_workstation_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.update_workstation_cluster: gapic_v1.method.wrap_method( + self.update_workstation_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_workstation_cluster: gapic_v1.method.wrap_method( + self.delete_workstation_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.get_workstation_config: gapic_v1.method.wrap_method( + self.get_workstation_config, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_workstation_configs: gapic_v1.method.wrap_method( + self.list_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_usable_workstation_configs: gapic_v1.method.wrap_method( + self.list_usable_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_workstation_config: gapic_v1.method.wrap_method( + self.create_workstation_config, + default_timeout=60.0, + client_info=client_info, + ), + self.update_workstation_config: gapic_v1.method.wrap_method( + self.update_workstation_config, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_workstation_config: gapic_v1.method.wrap_method( + self.delete_workstation_config, + default_timeout=60.0, + client_info=client_info, + ), + self.get_workstation: gapic_v1.method.wrap_method( + self.get_workstation, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_workstations: gapic_v1.method.wrap_method( + self.list_workstations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_usable_workstations: gapic_v1.method.wrap_method( + self.list_usable_workstations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), 
+ default_timeout=60.0, + client_info=client_info, + ), + self.create_workstation: gapic_v1.method.wrap_method( + self.create_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.update_workstation: gapic_v1.method.wrap_method( + self.update_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_workstation: gapic_v1.method.wrap_method( + self.delete_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.start_workstation: gapic_v1.method.wrap_method( + self.start_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.stop_workstation: gapic_v1.method.wrap_method( + self.stop_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.generate_access_token: gapic_v1.method.wrap_method( + self.generate_access_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_workstation_cluster( + self, + ) -> Callable[ + [workstations.GetWorkstationClusterRequest], + Union[ + workstations.WorkstationCluster, Awaitable[workstations.WorkstationCluster] + ], + ]: + raise NotImplementedError() + + @property + def list_workstation_clusters( + self, + ) -> Callable[ + [workstations.ListWorkstationClustersRequest], + Union[ + workstations.ListWorkstationClustersResponse, + Awaitable[workstations.ListWorkstationClustersResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_workstation_cluster( + self, + ) -> Callable[ + [workstations.CreateWorkstationClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], + Union[ + workstations.WorkstationConfig, Awaitable[workstations.WorkstationConfig] + ], + ]: + raise NotImplementedError() + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + Union[ + workstations.ListWorkstationConfigsResponse, + Awaitable[workstations.ListWorkstationConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + Union[ + workstations.ListUsableWorkstationConfigsResponse, + Awaitable[workstations.ListUsableWorkstationConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_workstation_config( + self, + ) -> 
Callable[ + [workstations.CreateWorkstationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_workstation( + self, + ) -> Callable[ + [workstations.GetWorkstationRequest], + Union[workstations.Workstation, Awaitable[workstations.Workstation]], + ]: + raise NotImplementedError() + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], + Union[ + workstations.ListWorkstationsResponse, + Awaitable[workstations.ListWorkstationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + Union[ + workstations.ListUsableWorkstationsResponse, + Awaitable[workstations.ListUsableWorkstationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_workstation( + self, + ) -> Callable[ + [workstations.CreateWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workstation( + self, + ) -> Callable[ + [workstations.UpdateWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_workstation( + self, + ) -> Callable[ + [workstations.DeleteWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def start_workstation( + self, + ) -> Callable[ + [workstations.StartWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def stop_workstation( + self, + ) -> Callable[ + [workstations.StopWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + Union[ + workstations.GenerateAccessTokenResponse, + Awaitable[workstations.GenerateAccessTokenResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, 
Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("WorkstationsTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/grpc.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/grpc.py new file mode 100644 index 000000000000..41627792b4e6 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/grpc.py @@ -0,0 +1,974 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.workstations_v1.types import workstations + +from .base import DEFAULT_CLIENT_INFO, WorkstationsTransport + + +class WorkstationsGrpcTransport(WorkstationsTransport): + """gRPC backend transport for Workstations. + + Service for interacting with Cloud Workstations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
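+
+    Editorial note, not generated text: application code rarely instantiates
+    this transport directly; a hand-built instance can be injected like so,
+    where ``creds`` is a placeholder credentials object::
+
+        transport = WorkstationsGrpcTransport(credentials=creds)
+        client = WorkstationsClient(transport=transport)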
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
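+
+        Editorial note, not generated text: as the body below shows, a
+        pre-built ``channel`` takes precedence over every credential-related
+        argument, all of which are then ignored::
+
+            channel = WorkstationsGrpcTransport.create_channel()
+            transport = WorkstationsGrpcTransport(channel=channel)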
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def get_workstation_cluster( + self, + ) -> Callable[ + [workstations.GetWorkstationClusterRequest], workstations.WorkstationCluster + ]: + r"""Return a callable for the get workstation cluster method over gRPC. + + Returns the requested workstation cluster. + + Returns: + Callable[[~.GetWorkstationClusterRequest], + ~.WorkstationCluster]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workstation_cluster" not in self._stubs: + self._stubs["get_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GetWorkstationCluster", + request_serializer=workstations.GetWorkstationClusterRequest.serialize, + response_deserializer=workstations.WorkstationCluster.deserialize, + ) + return self._stubs["get_workstation_cluster"] + + @property + def list_workstation_clusters( + self, + ) -> Callable[ + [workstations.ListWorkstationClustersRequest], + workstations.ListWorkstationClustersResponse, + ]: + r"""Return a callable for the list workstation clusters method over gRPC. + + Returns all workstation clusters in the specified + location. + + Returns: + Callable[[~.ListWorkstationClustersRequest], + ~.ListWorkstationClustersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstation_clusters" not in self._stubs: + self._stubs["list_workstation_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListWorkstationClusters", + request_serializer=workstations.ListWorkstationClustersRequest.serialize, + response_deserializer=workstations.ListWorkstationClustersResponse.deserialize, + ) + return self._stubs["list_workstation_clusters"] + + @property + def create_workstation_cluster( + self, + ) -> Callable[ + [workstations.CreateWorkstationClusterRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create workstation cluster method over gRPC. + + Creates a new workstation cluster. 
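+
+        Example (an illustrative sketch; ``request`` is an assumed,
+        already-populated ``CreateWorkstationClusterRequest`` and
+        ``transport`` an instance of this class). Unlike the wrapped
+        client method, the raw callable returns the bare
+        ``google.longrunning`` operation message::
+
+            operation = transport.create_workstation_cluster(request)  # assumed request
+            # Re-fetch the operation by name to check on its progress.
+            operation = transport.operations_client.get_operation(operation.name)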
+ + Returns: + Callable[[~.CreateWorkstationClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workstation_cluster" not in self._stubs: + self._stubs["create_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/CreateWorkstationCluster", + request_serializer=workstations.CreateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_cluster"] + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update workstation cluster method over gRPC. + + Updates an existing workstation cluster. + + Returns: + Callable[[~.UpdateWorkstationClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_cluster" not in self._stubs: + self._stubs["update_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/UpdateWorkstationCluster", + request_serializer=workstations.UpdateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_cluster"] + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete workstation cluster method over gRPC. + + Deletes the specified workstation cluster. + + Returns: + Callable[[~.DeleteWorkstationClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_cluster" not in self._stubs: + self._stubs["delete_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/DeleteWorkstationCluster", + request_serializer=workstations.DeleteWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_cluster"] + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], workstations.WorkstationConfig + ]: + r"""Return a callable for the get workstation config method over gRPC. + + Returns the requested workstation configuration. + + Returns: + Callable[[~.GetWorkstationConfigRequest], + ~.WorkstationConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_workstation_config" not in self._stubs: + self._stubs["get_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GetWorkstationConfig", + request_serializer=workstations.GetWorkstationConfigRequest.serialize, + response_deserializer=workstations.WorkstationConfig.deserialize, + ) + return self._stubs["get_workstation_config"] + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + workstations.ListWorkstationConfigsResponse, + ]: + r"""Return a callable for the list workstation configs method over gRPC. + + Returns all workstation configurations in the + specified cluster. + + Returns: + Callable[[~.ListWorkstationConfigsRequest], + ~.ListWorkstationConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstation_configs" not in self._stubs: + self._stubs["list_workstation_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListWorkstationConfigs", + request_serializer=workstations.ListWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_workstation_configs"] + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + workstations.ListUsableWorkstationConfigsResponse, + ]: + r"""Return a callable for the list usable workstation + configs method over gRPC. + + Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + Returns: + Callable[[~.ListUsableWorkstationConfigsRequest], + ~.ListUsableWorkstationConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstation_configs" not in self._stubs: + self._stubs[ + "list_usable_workstation_configs" + ] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListUsableWorkstationConfigs", + request_serializer=workstations.ListUsableWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_usable_workstation_configs"] + + @property + def create_workstation_config( + self, + ) -> Callable[ + [workstations.CreateWorkstationConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create workstation config method over gRPC. + + Creates a new workstation configuration. + + Returns: + Callable[[~.CreateWorkstationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workstation_config" not in self._stubs: + self._stubs["create_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/CreateWorkstationConfig", + request_serializer=workstations.CreateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_config"] + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update workstation config method over gRPC. + + Updates an existing workstation configuration. + + Returns: + Callable[[~.UpdateWorkstationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_config" not in self._stubs: + self._stubs["update_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/UpdateWorkstationConfig", + request_serializer=workstations.UpdateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_config"] + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete workstation config method over gRPC. + + Deletes the specified workstation configuration. + + Returns: + Callable[[~.DeleteWorkstationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_config" not in self._stubs: + self._stubs["delete_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/DeleteWorkstationConfig", + request_serializer=workstations.DeleteWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_config"] + + @property + def get_workstation( + self, + ) -> Callable[[workstations.GetWorkstationRequest], workstations.Workstation]: + r"""Return a callable for the get workstation method over gRPC. + + Returns the requested workstation. + + Returns: + Callable[[~.GetWorkstationRequest], + ~.Workstation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_workstation" not in self._stubs: + self._stubs["get_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GetWorkstation", + request_serializer=workstations.GetWorkstationRequest.serialize, + response_deserializer=workstations.Workstation.deserialize, + ) + return self._stubs["get_workstation"] + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], workstations.ListWorkstationsResponse + ]: + r"""Return a callable for the list workstations method over gRPC. + + Returns all Workstations using the specified + workstation configuration. + + Returns: + Callable[[~.ListWorkstationsRequest], + ~.ListWorkstationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstations" not in self._stubs: + self._stubs["list_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListWorkstations", + request_serializer=workstations.ListWorkstationsRequest.serialize, + response_deserializer=workstations.ListWorkstationsResponse.deserialize, + ) + return self._stubs["list_workstations"] + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + workstations.ListUsableWorkstationsResponse, + ]: + r"""Return a callable for the list usable workstations method over gRPC. + + Returns all workstations using the specified + workstation configuration on which the caller has the + "workstations.workstations.use" permission. + + Returns: + Callable[[~.ListUsableWorkstationsRequest], + ~.ListUsableWorkstationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstations" not in self._stubs: + self._stubs["list_usable_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListUsableWorkstations", + request_serializer=workstations.ListUsableWorkstationsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationsResponse.deserialize, + ) + return self._stubs["list_usable_workstations"] + + @property + def create_workstation( + self, + ) -> Callable[[workstations.CreateWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the create workstation method over gRPC. + + Creates a new workstation. + + Returns: + Callable[[~.CreateWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workstation" not in self._stubs: + self._stubs["create_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/CreateWorkstation", + request_serializer=workstations.CreateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation"] + + @property + def update_workstation( + self, + ) -> Callable[[workstations.UpdateWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the update workstation method over gRPC. + + Updates an existing workstation. + + Returns: + Callable[[~.UpdateWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation" not in self._stubs: + self._stubs["update_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/UpdateWorkstation", + request_serializer=workstations.UpdateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation"] + + @property + def delete_workstation( + self, + ) -> Callable[[workstations.DeleteWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the delete workstation method over gRPC. + + Deletes the specified workstation. + + Returns: + Callable[[~.DeleteWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation" not in self._stubs: + self._stubs["delete_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/DeleteWorkstation", + request_serializer=workstations.DeleteWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation"] + + @property + def start_workstation( + self, + ) -> Callable[[workstations.StartWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the start workstation method over gRPC. + + Starts running a workstation so that users can + connect to it. + + Returns: + Callable[[~.StartWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_workstation" not in self._stubs: + self._stubs["start_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/StartWorkstation", + request_serializer=workstations.StartWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_workstation"] + + @property + def stop_workstation( + self, + ) -> Callable[[workstations.StopWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the stop workstation method over gRPC. + + Stops running a workstation, reducing costs. 
+ + Returns: + Callable[[~.StopWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_workstation" not in self._stubs: + self._stubs["stop_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/StopWorkstation", + request_serializer=workstations.StopWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_workstation"] + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + workstations.GenerateAccessTokenResponse, + ]: + r"""Return a callable for the generate access token method over gRPC. + + Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + Returns: + Callable[[~.GenerateAccessTokenRequest], + ~.GenerateAccessTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GenerateAccessToken", + request_serializer=workstations.GenerateAccessTokenRequest.serialize, + response_deserializer=workstations.GenerateAccessTokenResponse.deserialize, + ) + return self._stubs["generate_access_token"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
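+
+        Example (an illustrative sketch; ``workstation_name`` is an
+        assumed, fully qualified resource path, and the permission string
+        is the one quoted for ``list_usable_workstations`` above)::
+
+            response = transport.test_iam_permissions(
+                iam_policy_pb2.TestIamPermissionsRequest(
+                    resource=workstation_name,  # assumed resource path
+                    permissions=["workstations.workstations.use"],
+                )
+            )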
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("WorkstationsGrpcTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/grpc_asyncio.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/grpc_asyncio.py new file mode 100644 index 000000000000..ca8988437512 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/grpc_asyncio.py @@ -0,0 +1,996 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.workstations_v1.types import workstations + +from .base import DEFAULT_CLIENT_INFO, WorkstationsTransport +from .grpc import WorkstationsGrpcTransport + + +class WorkstationsGrpcAsyncIOTransport(WorkstationsTransport): + """gRPC AsyncIO backend transport for Workstations. + + Service for interacting with Cloud Workstations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
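+
+    Example (an illustrative sketch, not generated reference text; it
+    assumes a running event loop and application default credentials).
+    The async client constructs this transport by default::
+
+        from google.cloud import workstations_v1
+
+        async def show_cluster(name: str) -> None:
+            # ``name`` is an assumed workstation cluster resource path.
+            client = workstations_v1.WorkstationsAsyncClient()
+            cluster = await client.get_workstation_cluster(name=name)
+            print(cluster.display_name)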
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWTs should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages.
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_workstation_cluster( + self, + ) -> Callable[ + [workstations.GetWorkstationClusterRequest], + Awaitable[workstations.WorkstationCluster], + ]: + r"""Return a callable for the get workstation cluster method over gRPC. + + Returns the requested workstation cluster. + + Returns: + Callable[[~.GetWorkstationClusterRequest], + Awaitable[~.WorkstationCluster]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workstation_cluster" not in self._stubs: + self._stubs["get_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GetWorkstationCluster", + request_serializer=workstations.GetWorkstationClusterRequest.serialize, + response_deserializer=workstations.WorkstationCluster.deserialize, + ) + return self._stubs["get_workstation_cluster"] + + @property + def list_workstation_clusters( + self, + ) -> Callable[ + [workstations.ListWorkstationClustersRequest], + Awaitable[workstations.ListWorkstationClustersResponse], + ]: + r"""Return a callable for the list workstation clusters method over gRPC. + + Returns all workstation clusters in the specified + location. + + Returns: + Callable[[~.ListWorkstationClustersRequest], + Awaitable[~.ListWorkstationClustersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstation_clusters" not in self._stubs: + self._stubs["list_workstation_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListWorkstationClusters", + request_serializer=workstations.ListWorkstationClustersRequest.serialize, + response_deserializer=workstations.ListWorkstationClustersResponse.deserialize, + ) + return self._stubs["list_workstation_clusters"] + + @property + def create_workstation_cluster( + self, + ) -> Callable[ + [workstations.CreateWorkstationClusterRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create workstation cluster method over gRPC. + + Creates a new workstation cluster. + + Returns: + Callable[[~.CreateWorkstationClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
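+
+        Example (an illustrative sketch; ``request`` is an assumed,
+        already-populated ``CreateWorkstationClusterRequest`` and
+        ``transport`` an instance of this class). Awaiting the raw
+        callable yields the bare operation message, which can be
+        re-fetched through the async operations client::
+
+            operation = await transport.create_workstation_cluster(request)  # assumed request
+            operation = await transport.operations_client.get_operation(operation.name)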
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workstation_cluster" not in self._stubs: + self._stubs["create_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/CreateWorkstationCluster", + request_serializer=workstations.CreateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_cluster"] + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update workstation cluster method over gRPC. + + Updates an existing workstation cluster. + + Returns: + Callable[[~.UpdateWorkstationClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_cluster" not in self._stubs: + self._stubs["update_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/UpdateWorkstationCluster", + request_serializer=workstations.UpdateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_cluster"] + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete workstation cluster method over gRPC. + + Deletes the specified workstation cluster. + + Returns: + Callable[[~.DeleteWorkstationClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_cluster" not in self._stubs: + self._stubs["delete_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/DeleteWorkstationCluster", + request_serializer=workstations.DeleteWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_cluster"] + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], + Awaitable[workstations.WorkstationConfig], + ]: + r"""Return a callable for the get workstation config method over gRPC. + + Returns the requested workstation configuration. + + Returns: + Callable[[~.GetWorkstationConfigRequest], + Awaitable[~.WorkstationConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_workstation_config" not in self._stubs: + self._stubs["get_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GetWorkstationConfig", + request_serializer=workstations.GetWorkstationConfigRequest.serialize, + response_deserializer=workstations.WorkstationConfig.deserialize, + ) + return self._stubs["get_workstation_config"] + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + Awaitable[workstations.ListWorkstationConfigsResponse], + ]: + r"""Return a callable for the list workstation configs method over gRPC. + + Returns all workstation configurations in the + specified cluster. + + Returns: + Callable[[~.ListWorkstationConfigsRequest], + Awaitable[~.ListWorkstationConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstation_configs" not in self._stubs: + self._stubs["list_workstation_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListWorkstationConfigs", + request_serializer=workstations.ListWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_workstation_configs"] + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + Awaitable[workstations.ListUsableWorkstationConfigsResponse], + ]: + r"""Return a callable for the list usable workstation + configs method over gRPC. + + Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + Returns: + Callable[[~.ListUsableWorkstationConfigsRequest], + Awaitable[~.ListUsableWorkstationConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstation_configs" not in self._stubs: + self._stubs[ + "list_usable_workstation_configs" + ] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListUsableWorkstationConfigs", + request_serializer=workstations.ListUsableWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_usable_workstation_configs"] + + @property + def create_workstation_config( + self, + ) -> Callable[ + [workstations.CreateWorkstationConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create workstation config method over gRPC. + + Creates a new workstation configuration. + + Returns: + Callable[[~.CreateWorkstationConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workstation_config" not in self._stubs: + self._stubs["create_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/CreateWorkstationConfig", + request_serializer=workstations.CreateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_config"] + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update workstation config method over gRPC. + + Updates an existing workstation configuration. + + Returns: + Callable[[~.UpdateWorkstationConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_config" not in self._stubs: + self._stubs["update_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/UpdateWorkstationConfig", + request_serializer=workstations.UpdateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_config"] + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete workstation config method over gRPC. + + Deletes the specified workstation configuration. + + Returns: + Callable[[~.DeleteWorkstationConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_config" not in self._stubs: + self._stubs["delete_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/DeleteWorkstationConfig", + request_serializer=workstations.DeleteWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_config"] + + @property + def get_workstation( + self, + ) -> Callable[ + [workstations.GetWorkstationRequest], Awaitable[workstations.Workstation] + ]: + r"""Return a callable for the get workstation method over gRPC. + + Returns the requested workstation. + + Returns: + Callable[[~.GetWorkstationRequest], + Awaitable[~.Workstation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_workstation" not in self._stubs: + self._stubs["get_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GetWorkstation", + request_serializer=workstations.GetWorkstationRequest.serialize, + response_deserializer=workstations.Workstation.deserialize, + ) + return self._stubs["get_workstation"] + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], + Awaitable[workstations.ListWorkstationsResponse], + ]: + r"""Return a callable for the list workstations method over gRPC. + + Returns all Workstations using the specified + workstation configuration. + + Returns: + Callable[[~.ListWorkstationsRequest], + Awaitable[~.ListWorkstationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstations" not in self._stubs: + self._stubs["list_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListWorkstations", + request_serializer=workstations.ListWorkstationsRequest.serialize, + response_deserializer=workstations.ListWorkstationsResponse.deserialize, + ) + return self._stubs["list_workstations"] + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + Awaitable[workstations.ListUsableWorkstationsResponse], + ]: + r"""Return a callable for the list usable workstations method over gRPC. + + Returns all workstations using the specified + workstation configuration on which the caller has the + "workstations.workstations.use" permission. + + Returns: + Callable[[~.ListUsableWorkstationsRequest], + Awaitable[~.ListUsableWorkstationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstations" not in self._stubs: + self._stubs["list_usable_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/ListUsableWorkstations", + request_serializer=workstations.ListUsableWorkstationsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationsResponse.deserialize, + ) + return self._stubs["list_usable_workstations"] + + @property + def create_workstation( + self, + ) -> Callable[ + [workstations.CreateWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create workstation method over gRPC. + + Creates a new workstation. + + Returns: + Callable[[~.CreateWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workstation" not in self._stubs: + self._stubs["create_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/CreateWorkstation", + request_serializer=workstations.CreateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation"] + + @property + def update_workstation( + self, + ) -> Callable[ + [workstations.UpdateWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update workstation method over gRPC. + + Updates an existing workstation. + + Returns: + Callable[[~.UpdateWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation" not in self._stubs: + self._stubs["update_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/UpdateWorkstation", + request_serializer=workstations.UpdateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation"] + + @property + def delete_workstation( + self, + ) -> Callable[ + [workstations.DeleteWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete workstation method over gRPC. + + Deletes the specified workstation. + + Returns: + Callable[[~.DeleteWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation" not in self._stubs: + self._stubs["delete_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/DeleteWorkstation", + request_serializer=workstations.DeleteWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation"] + + @property + def start_workstation( + self, + ) -> Callable[ + [workstations.StartWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the start workstation method over gRPC. + + Starts running a workstation so that users can + connect to it. + + Returns: + Callable[[~.StartWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_workstation" not in self._stubs: + self._stubs["start_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/StartWorkstation", + request_serializer=workstations.StartWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_workstation"] + + @property + def stop_workstation( + self, + ) -> Callable[ + [workstations.StopWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the stop workstation method over gRPC. + + Stops running a workstation, reducing costs. 
+ + Returns: + Callable[[~.StopWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_workstation" not in self._stubs: + self._stubs["stop_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/StopWorkstation", + request_serializer=workstations.StopWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_workstation"] + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + Awaitable[workstations.GenerateAccessTokenResponse], + ]: + r"""Return a callable for the generate access token method over gRPC. + + Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + Returns: + Callable[[~.GenerateAccessTokenRequest], + Awaitable[~.GenerateAccessTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1.Workstations/GenerateAccessToken", + request_serializer=workstations.GenerateAccessTokenRequest.serialize, + response_deserializer=workstations.GenerateAccessTokenResponse.deserialize, + ) + return self._stubs["generate_access_token"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("WorkstationsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/rest.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/rest.py new file mode 100644 index 000000000000..37c18639ffde --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/services/workstations/transports/rest.py @@ -0,0 +1,3602 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.workstations_v1.types import workstations + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import WorkstationsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class WorkstationsRestInterceptor: + """Interceptor for Workstations. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
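+    Every RPC exposed by the transport has a matching ``pre_``/``post_``
+    hook pair on this class; each default implementation is a no-op
+    passthrough.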
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the WorkstationsRestTransport. + + .. code-block:: python + class MyCustomWorkstationsInterceptor(WorkstationsRestInterceptor): + def pre_create_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_access_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_access_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_usable_workstation_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable_workstation_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_usable_workstations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable_workstations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workstation_clusters(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_list_workstation_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workstation_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workstation_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workstations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workstations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = WorkstationsRestTransport(interceptor=MyCustomWorkstationsInterceptor()) + client = WorkstationsClient(transport=transport) + + + """ + + def pre_create_workstation( + self, + request: workstations.CreateWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.CreateWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_create_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_create_workstation_cluster( + self, + request: workstations.CreateWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.CreateWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_create_workstation_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
+ """ + return response + + def pre_create_workstation_config( + self, + request: workstations.CreateWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.CreateWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_create_workstation_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_delete_workstation( + self, + request: workstations.DeleteWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.DeleteWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_delete_workstation_cluster( + self, + request: workstations.DeleteWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.DeleteWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_workstation_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_delete_workstation_config( + self, + request: workstations.DeleteWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.DeleteWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_workstation_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_generate_access_token( + self, + request: workstations.GenerateAccessTokenRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GenerateAccessTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_access_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. 
+ """ + return request, metadata + + def post_generate_access_token( + self, response: workstations.GenerateAccessTokenResponse + ) -> workstations.GenerateAccessTokenResponse: + """Post-rpc interceptor for generate_access_token + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_workstation( + self, + request: workstations.GetWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GetWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_workstation( + self, response: workstations.Workstation + ) -> workstations.Workstation: + """Post-rpc interceptor for get_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_workstation_cluster( + self, + request: workstations.GetWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GetWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_workstation_cluster( + self, response: workstations.WorkstationCluster + ) -> workstations.WorkstationCluster: + """Post-rpc interceptor for get_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_workstation_config( + self, + request: workstations.GetWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GetWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_workstation_config( + self, response: workstations.WorkstationConfig + ) -> workstations.WorkstationConfig: + """Post-rpc interceptor for get_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_usable_workstation_configs( + self, + request: workstations.ListUsableWorkstationConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workstations.ListUsableWorkstationConfigsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_usable_workstation_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_usable_workstation_configs( + self, response: workstations.ListUsableWorkstationConfigsResponse + ) -> workstations.ListUsableWorkstationConfigsResponse: + """Post-rpc interceptor for list_usable_workstation_configs + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
+ """ + return response + + def pre_list_usable_workstations( + self, + request: workstations.ListUsableWorkstationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListUsableWorkstationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_usable_workstations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_usable_workstations( + self, response: workstations.ListUsableWorkstationsResponse + ) -> workstations.ListUsableWorkstationsResponse: + """Post-rpc interceptor for list_usable_workstations + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_workstation_clusters( + self, + request: workstations.ListWorkstationClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListWorkstationClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workstation_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_workstation_clusters( + self, response: workstations.ListWorkstationClustersResponse + ) -> workstations.ListWorkstationClustersResponse: + """Post-rpc interceptor for list_workstation_clusters + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_workstation_configs( + self, + request: workstations.ListWorkstationConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListWorkstationConfigsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workstation_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_workstation_configs( + self, response: workstations.ListWorkstationConfigsResponse + ) -> workstations.ListWorkstationConfigsResponse: + """Post-rpc interceptor for list_workstation_configs + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_workstations( + self, + request: workstations.ListWorkstationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListWorkstationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workstations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_workstations( + self, response: workstations.ListWorkstationsResponse + ) -> workstations.ListWorkstationsResponse: + """Post-rpc interceptor for list_workstations + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
+ """ + return response + + def pre_start_workstation( + self, + request: workstations.StartWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.StartWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_start_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for start_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_stop_workstation( + self, + request: workstations.StopWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.StopWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_stop_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for stop_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_update_workstation( + self, + request: workstations.UpdateWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.UpdateWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_update_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_update_workstation_cluster( + self, + request: workstations.UpdateWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.UpdateWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_update_workstation_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_update_workstation_config( + self, + request: workstations.UpdateWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.UpdateWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. 
+ """ + return request, metadata + + def post_update_workstation_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class WorkstationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: WorkstationsRestInterceptor + + +class WorkstationsRestTransport(WorkstationsTransport): + """REST backend transport for Workstations. + + Service for interacting with Cloud Workstations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[WorkstationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or WorkstationsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
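+        # Usage sketch (illustrative, not generated code): long-running REST
+        # methods on this transport return operations_pb2.Operation messages,
+        # which can then be polled by name through this cached client, e.g.
+        #   op = transport.operations_client.get_operation(name=operation.name)
+        # until `op.done` is True.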
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("CreateWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workstationId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.CreateWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workstation method over HTTP. + + Args: + request (~.workstations.CreateWorkstationRequest): + The request object. Message for creating a + CreateWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
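+                    The returned Operation can subsequently
+                    be polled through this transport's
+                    ``operations_client`` property.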
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations", + "body": "workstation", + }, + ] + request, metadata = self._interceptor.pre_create_workstation( + request, metadata + ) + pb_request = workstations.CreateWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workstation(resp) + return resp + + class _CreateWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("CreateWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workstationClusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.CreateWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workstation + cluster method over HTTP. + + Args: + request (~.workstations.CreateWorkstationClusterRequest): + The request object. Message for creating a + CreateWorkstationCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/workstationClusters", + "body": "workstation_cluster", + }, + ] + request, metadata = self._interceptor.pre_create_workstation_cluster( + request, metadata + ) + pb_request = workstations.CreateWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workstation_cluster(resp) + return resp + + class _CreateWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("CreateWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workstationConfigId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.CreateWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workstation config method over HTTP. + + Args: + request (~.workstations.CreateWorkstationConfigRequest): + The request object. Message for creating a + CreateWorkstationConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs", + "body": "workstation_config", + }, + ] + request, metadata = self._interceptor.pre_create_workstation_config( + request, metadata + ) + pb_request = workstations.CreateWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workstation_config(resp) + return resp + + class _DeleteWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("DeleteWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.DeleteWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete workstation method over HTTP. + + Args: + request (~.workstations.DeleteWorkstationRequest): + The request object. Request message for + DeleteWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workstation( + request, metadata + ) + pb_request = workstations.DeleteWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_workstation(resp) + return resp + + class _DeleteWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("DeleteWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.DeleteWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete workstation + cluster method over HTTP. + + Args: + request (~.workstations.DeleteWorkstationClusterRequest): + The request object. Message for deleting a workstation + cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workstation_cluster( + request, metadata + ) + pb_request = workstations.DeleteWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_workstation_cluster(resp) + return resp + + class _DeleteWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("DeleteWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.DeleteWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete workstation config method over HTTP. + + Args: + request (~.workstations.DeleteWorkstationConfigRequest): + The request object. Message for deleting a workstation + configuration. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workstation_config( + request, metadata + ) + pb_request = workstations.DeleteWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_workstation_config(resp) + return resp + + class _GenerateAccessToken(WorkstationsRestStub): + def __hash__(self): + return hash("GenerateAccessToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GenerateAccessTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.GenerateAccessTokenResponse: + r"""Call the generate access token method over HTTP. + + Args: + request (~.workstations.GenerateAccessTokenRequest): + The request object. Request message for + GenerateAccessToken. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.GenerateAccessTokenResponse: + Response message for + GenerateAccessToken. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{workstation=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:generateAccessToken", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_access_token( + request, metadata + ) + pb_request = workstations.GenerateAccessTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.GenerateAccessTokenResponse() + pb_resp = workstations.GenerateAccessTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_access_token(resp) + return resp + + class _GetWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("GetWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GetWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.Workstation: + r"""Call the get workstation method over HTTP. + + Args: + request (~.workstations.GetWorkstationRequest): + The request object. Request message for GetWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.Workstation: + A single instance of a developer + workstation with its own persistent + storage. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workstation(request, metadata) + pb_request = workstations.GetWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.Workstation() + pb_resp = workstations.Workstation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workstation(resp) + return resp + + class _GetWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("GetWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GetWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationCluster: + r"""Call the get workstation cluster method over HTTP. + + Args: + request (~.workstations.GetWorkstationClusterRequest): + The request object. Request message for + GetWorkstationCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.WorkstationCluster: + A grouping of workstation + configurations and the associated + workstations in that region. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workstation_cluster( + request, metadata + ) + pb_request = workstations.GetWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.WorkstationCluster() + pb_resp = workstations.WorkstationCluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workstation_cluster(resp) + return resp + + class _GetWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("GetWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GetWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationConfig: + r"""Call the get workstation config method over HTTP. + + Args: + request (~.workstations.GetWorkstationConfigRequest): + The request object. Request message for + GetWorkstationConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.WorkstationConfig: + A set of configuration options + describing how a workstation will be + run. Workstation configurations are + intended to be shared across multiple + workstations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workstation_config( + request, metadata + ) + pb_request = workstations.GetWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.WorkstationConfig() + pb_resp = workstations.WorkstationConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workstation_config(resp) + return resp + + class _ListUsableWorkstationConfigs(WorkstationsRestStub): + def __hash__(self): + return hash("ListUsableWorkstationConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListUsableWorkstationConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListUsableWorkstationConfigsResponse: + r"""Call the list usable workstation + configs method over HTTP. + + Args: + request (~.workstations.ListUsableWorkstationConfigsRequest): + The request object. Request message for + ListUsableWorkstationConfigs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListUsableWorkstationConfigsResponse: + Response message for + ListUsableWorkstationConfigs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs:listUsable", + }, + ] + request, metadata = self._interceptor.pre_list_usable_workstation_configs( + request, metadata + ) + pb_request = workstations.ListUsableWorkstationConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListUsableWorkstationConfigsResponse() + pb_resp = workstations.ListUsableWorkstationConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_usable_workstation_configs(resp) + return resp + + class _ListUsableWorkstations(WorkstationsRestStub): + def __hash__(self): + return hash("ListUsableWorkstations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListUsableWorkstationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListUsableWorkstationsResponse: + r"""Call the list usable workstations method over HTTP. + + Args: + request (~.workstations.ListUsableWorkstationsRequest): + The request object. Request message for + ListUsableWorkstations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListUsableWorkstationsResponse: + Response message for + ListUsableWorkstations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations:listUsable", + }, + ] + request, metadata = self._interceptor.pre_list_usable_workstations( + request, metadata + ) + pb_request = workstations.ListUsableWorkstationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListUsableWorkstationsResponse() + pb_resp = workstations.ListUsableWorkstationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_usable_workstations(resp) + return resp + + class _ListWorkstationClusters(WorkstationsRestStub): + def __hash__(self): + return hash("ListWorkstationClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListWorkstationClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListWorkstationClustersResponse: + r"""Call the list workstation clusters method over HTTP. + + Args: + request (~.workstations.ListWorkstationClustersRequest): + The request object. Request message for + ListWorkstationClusters. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListWorkstationClustersResponse: + Response message for + ListWorkstationClusters. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/workstationClusters", + }, + ] + request, metadata = self._interceptor.pre_list_workstation_clusters( + request, metadata + ) + pb_request = workstations.ListWorkstationClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListWorkstationClustersResponse() + pb_resp = workstations.ListWorkstationClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workstation_clusters(resp) + return resp + + class _ListWorkstationConfigs(WorkstationsRestStub): + def __hash__(self): + return hash("ListWorkstationConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListWorkstationConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListWorkstationConfigsResponse: + r"""Call the list workstation configs method over HTTP. + + Args: + request (~.workstations.ListWorkstationConfigsRequest): + The request object. Request message for + ListWorkstationConfigs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListWorkstationConfigsResponse: + Response message for + ListWorkstationConfigs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs", + }, + ] + request, metadata = self._interceptor.pre_list_workstation_configs( + request, metadata + ) + pb_request = workstations.ListWorkstationConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListWorkstationConfigsResponse() + pb_resp = workstations.ListWorkstationConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workstation_configs(resp) + return resp + + class _ListWorkstations(WorkstationsRestStub): + def __hash__(self): + return hash("ListWorkstations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListWorkstationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListWorkstationsResponse: + r"""Call the list workstations method over HTTP. + + Args: + request (~.workstations.ListWorkstationsRequest): + The request object. Request message for ListWorkstations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListWorkstationsResponse: + Response message for + ListWorkstations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations", + }, + ] + request, metadata = self._interceptor.pre_list_workstations( + request, metadata + ) + pb_request = workstations.ListWorkstationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListWorkstationsResponse() + pb_resp = workstations.ListWorkstationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workstations(resp) + return resp + + class _StartWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("StartWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.StartWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the start workstation method over HTTP. + + Args: + request (~.workstations.StartWorkstationRequest): + The request object. Request message for StartWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:start", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_workstation( + request, metadata + ) + pb_request = workstations.StartWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_workstation(resp) + return resp + + class _StopWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("StopWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.StopWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the stop workstation method over HTTP. + + Args: + request (~.workstations.StopWorkstationRequest): + The request object. Request message for StopWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:stop", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stop_workstation( + request, metadata + ) + pb_request = workstations.StopWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_workstation(resp) + return resp + + class _UpdateWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("UpdateWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.UpdateWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update workstation method over HTTP. + + Args: + request (~.workstations.UpdateWorkstationRequest): + The request object. Request message for + UpdateWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{workstation.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}", + "body": "workstation", + }, + ] + request, metadata = self._interceptor.pre_update_workstation( + request, metadata + ) + pb_request = workstations.UpdateWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workstation(resp) + return resp + + class _UpdateWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("UpdateWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.UpdateWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update workstation + cluster method over HTTP. + + Args: + request (~.workstations.UpdateWorkstationClusterRequest): + The request object. Request message for + UpdateWorkstationCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{workstation_cluster.name=projects/*/locations/*/workstationClusters/*}", + "body": "workstation_cluster", + }, + ] + request, metadata = self._interceptor.pre_update_workstation_cluster( + request, metadata + ) + pb_request = workstations.UpdateWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workstation_cluster(resp) + return resp + + class _UpdateWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("UpdateWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.UpdateWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update workstation config method over HTTP. + + Args: + request (~.workstations.UpdateWorkstationConfigRequest): + The request object. Request message for + UpdateWorkstationConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{workstation_config.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}", + "body": "workstation_config", + }, + ] + request, metadata = self._interceptor.pre_update_workstation_config( + request, metadata + ) + pb_request = workstations.UpdateWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workstation_config(resp) + return resp + + @property + def create_workstation( + self, + ) -> Callable[[workstations.CreateWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_workstation_cluster( + self, + ) -> Callable[ + [workstations.CreateWorkstationClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_workstation_config( + self, + ) -> Callable[ + [workstations.CreateWorkstationConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workstation( + self, + ) -> Callable[[workstations.DeleteWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + workstations.GenerateAccessTokenResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workstation( + self, + ) -> Callable[[workstations.GetWorkstationRequest], workstations.Workstation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workstation_cluster( + self, + ) -> Callable[ + [workstations.GetWorkstationClusterRequest], workstations.WorkstationCluster + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], workstations.WorkstationConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + workstations.ListUsableWorkstationConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsableWorkstationConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + workstations.ListUsableWorkstationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsableWorkstations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workstation_clusters( + self, + ) -> Callable[ + [workstations.ListWorkstationClustersRequest], + workstations.ListWorkstationClustersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListWorkstationClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + workstations.ListWorkstationConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkstationConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], workstations.ListWorkstationsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkstations(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_workstation( + self, + ) -> Callable[[workstations.StartWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_workstation( + self, + ) -> Callable[[workstations.StopWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workstation( + self, + ) -> Callable[[workstations.UpdateWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(WorkstationsRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:getIamPolicy", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(WorkstationsRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(WorkstationsRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
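+
+            Example (an illustrative sketch only; ``transport`` and the
+            operation name below are hypothetical)::
+
+                from google.longrunning import operations_pb2
+
+                request = operations_pb2.GetOperationRequest(
+                    name="projects/my-project/locations/us-central1/operations/operation-12345",
+                )
+                operation = transport.get_operation(request)
+                if operation.done:
+                    print("operation finished")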
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("WorkstationsRestTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/types/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/types/__init__.py new file mode 100644 index 000000000000..2ed11b1f4e97 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/types/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .workstations import ( + CreateWorkstationClusterRequest, + CreateWorkstationConfigRequest, + CreateWorkstationRequest, + DeleteWorkstationClusterRequest, + DeleteWorkstationConfigRequest, + DeleteWorkstationRequest, + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GetWorkstationClusterRequest, + GetWorkstationConfigRequest, + GetWorkstationRequest, + ListUsableWorkstationConfigsRequest, + ListUsableWorkstationConfigsResponse, + ListUsableWorkstationsRequest, + ListUsableWorkstationsResponse, + ListWorkstationClustersRequest, + ListWorkstationClustersResponse, + ListWorkstationConfigsRequest, + ListWorkstationConfigsResponse, + ListWorkstationsRequest, + ListWorkstationsResponse, + OperationMetadata, + StartWorkstationRequest, + StopWorkstationRequest, + UpdateWorkstationClusterRequest, + UpdateWorkstationConfigRequest, + UpdateWorkstationRequest, + Workstation, + WorkstationCluster, + WorkstationConfig, +) + +__all__ = ( + "CreateWorkstationClusterRequest", + "CreateWorkstationConfigRequest", + "CreateWorkstationRequest", + "DeleteWorkstationClusterRequest", + "DeleteWorkstationConfigRequest", + "DeleteWorkstationRequest", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GetWorkstationClusterRequest", + "GetWorkstationConfigRequest", + "GetWorkstationRequest", + "ListUsableWorkstationConfigsRequest", + "ListUsableWorkstationConfigsResponse", + "ListUsableWorkstationsRequest", + "ListUsableWorkstationsResponse", + "ListWorkstationClustersRequest", + "ListWorkstationClustersResponse", + "ListWorkstationConfigsRequest", + "ListWorkstationConfigsResponse", + "ListWorkstationsRequest", + "ListWorkstationsResponse", + "OperationMetadata", + "StartWorkstationRequest", + "StopWorkstationRequest", + "UpdateWorkstationClusterRequest", + "UpdateWorkstationConfigRequest", + "UpdateWorkstationRequest", + "Workstation", + "WorkstationCluster", + "WorkstationConfig", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1/types/workstations.py b/packages/google-cloud-workstations/google/cloud/workstations_v1/types/workstations.py new file mode 100644 index 000000000000..3e9272ef2382 --- 
/dev/null
+++ b/packages/google-cloud-workstations/google/cloud/workstations_v1/types/workstations.py
@@ -0,0 +1,1722 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.workstations.v1",
+    manifest={
+        "WorkstationCluster",
+        "WorkstationConfig",
+        "Workstation",
+        "GetWorkstationClusterRequest",
+        "ListWorkstationClustersRequest",
+        "ListWorkstationClustersResponse",
+        "CreateWorkstationClusterRequest",
+        "UpdateWorkstationClusterRequest",
+        "DeleteWorkstationClusterRequest",
+        "GetWorkstationConfigRequest",
+        "ListWorkstationConfigsRequest",
+        "ListWorkstationConfigsResponse",
+        "ListUsableWorkstationConfigsRequest",
+        "ListUsableWorkstationConfigsResponse",
+        "CreateWorkstationConfigRequest",
+        "UpdateWorkstationConfigRequest",
+        "DeleteWorkstationConfigRequest",
+        "GetWorkstationRequest",
+        "ListWorkstationsRequest",
+        "ListWorkstationsResponse",
+        "ListUsableWorkstationsRequest",
+        "ListUsableWorkstationsResponse",
+        "CreateWorkstationRequest",
+        "UpdateWorkstationRequest",
+        "DeleteWorkstationRequest",
+        "StartWorkstationRequest",
+        "StopWorkstationRequest",
+        "GenerateAccessTokenRequest",
+        "GenerateAccessTokenResponse",
+        "OperationMetadata",
+    },
+)
+
+
+class WorkstationCluster(proto.Message):
+    r"""A grouping of workstation configurations and the associated
+    workstations in that region.
+
+    Attributes:
+        name (str):
+            Full name of this resource.
+        display_name (str):
+            Human-readable name for this resource.
+        uid (str):
+            Output only. A system-assigned unique
+            identifier for this resource.
+        reconciling (bool):
+            Output only. Indicates whether this resource
+            is currently being updated to match its intended
+            state.
+        annotations (MutableMapping[str, str]):
+            Client-specified annotations.
+        labels (MutableMapping[str, str]):
+            Client-specified labels that are applied to
+            the resource and that are also propagated to the
+            underlying Compute Engine resources.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when this resource was
+            created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when this resource was most
+            recently updated.
+        delete_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when this resource was
+            soft-deleted.
+        etag (str):
+            Checksum computed by the server. May be sent
+            on update and delete requests to make sure that
+            the client has an up-to-date value before
+            proceeding.
+        network (str):
+            Immutable. Name of the Compute Engine network
+            in which instances associated with this cluster
+            will be created.
+        subnetwork (str):
+            Immutable. Name of the Compute Engine
+            subnetwork in which instances associated with
+            this cluster will be created. Must be part of
+            the subnetwork specified for this cluster.
+        control_plane_ip (str):
+            Output only. The private IP address of the
+            control plane for this cluster. Workstation VMs
+            need access to this IP address to work with the
+            service, so make sure that your firewall rules
+            allow egress from the workstation VMs to this
+            address.
+        private_cluster_config (google.cloud.workstations_v1.types.WorkstationCluster.PrivateClusterConfig):
+            Configuration for private cluster.
+        degraded (bool):
+            Output only. Whether this resource is in degraded mode, in
+            which case it may require user action to restore full
+            functionality. Details can be found in the ``conditions``
+            field.
+        conditions (MutableSequence[google.rpc.status_pb2.Status]):
+            Output only. Status conditions describing the
+            current resource state.
+    """
+
+    class PrivateClusterConfig(proto.Message):
+        r"""Configuration options for private clusters.
+
+        Attributes:
+            enable_private_endpoint (bool):
+                Immutable. Whether Workstations endpoint is
+                private.
+            cluster_hostname (str):
+                Output only. Hostname for the workstation
+                cluster. This field will be populated only when
+                private endpoint is enabled. To access
+                workstations in the cluster, create a new DNS
+                zone mapping this domain name to an internal IP
+                address and a forwarding rule mapping that
+                address to the service attachment.
+            service_attachment_uri (str):
+                Output only. Service attachment URI for the workstation
+                cluster. The service attachment is created when private
+                endpoint is enabled. To access workstations in the cluster,
+                configure access to the managed service using `Private
+                Service
+                Connect `__.
+            allowed_projects (MutableSequence[str]):
+                Additional projects that are allowed to
+                attach to the workstation cluster's service
+                attachment. By default, the workstation
+                cluster's project and the VPC host project (if
+                different) are allowed.
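+
+        Example (an illustrative sketch only; the project ID below is
+        hypothetical)::
+
+            from google.cloud import workstations_v1
+
+            private_config = workstations_v1.WorkstationCluster.PrivateClusterConfig(
+                enable_private_endpoint=True,
+                allowed_projects=["my-other-project"],
+            )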
+ """ + + enable_private_endpoint: bool = proto.Field( + proto.BOOL, + number=1, + ) + cluster_hostname: str = proto.Field( + proto.STRING, + number=2, + ) + service_attachment_uri: str = proto.Field( + proto.STRING, + number=3, + ) + allowed_projects: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=15, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + network: str = proto.Field( + proto.STRING, + number=10, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=11, + ) + control_plane_ip: str = proto.Field( + proto.STRING, + number=16, + ) + private_cluster_config: PrivateClusterConfig = proto.Field( + proto.MESSAGE, + number=12, + message=PrivateClusterConfig, + ) + degraded: bool = proto.Field( + proto.BOOL, + number=13, + ) + conditions: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message=status_pb2.Status, + ) + + +class WorkstationConfig(proto.Message): + r"""A set of configuration options describing how a workstation + will be run. Workstation configurations are intended to be + shared across multiple workstations. + + Attributes: + name (str): + Full name of this resource. + display_name (str): + Human-readable name for this resource. + uid (str): + Output only. A system-assigned unique + identified for this resource. + reconciling (bool): + Output only. Indicates whether this resource + is currently being updated to match its intended + state. + annotations (MutableMapping[str, str]): + Client-specified annotations. + labels (MutableMapping[str, str]): + Client-specified labels that are applied to + the resource and that are also propagated to the + underlying Compute Engine resources. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was most + recently updated. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + soft-deleted. + etag (str): + Checksum computed by the server. May be sent + on update and delete requests to make sure that + the client has an up-to-date value before + proceeding. + idle_timeout (google.protobuf.duration_pb2.Duration): + How long to wait before automatically + stopping an instance that hasn't received any + user traffic. A value of 0 indicates that this + instance should never time out due to idleness. + Defaults to 20 minutes. + running_timeout (google.protobuf.duration_pb2.Duration): + How long to wait before automatically stopping a workstation + after it started. 
A value of 0 indicates that workstations + using this configuration should never time out. Must be + greater than 0 and less than 24 hours if encryption_key is + set. Defaults to 12 hours. + host (google.cloud.workstations_v1.types.WorkstationConfig.Host): + Runtime host for the workstation. + persistent_directories (MutableSequence[google.cloud.workstations_v1.types.WorkstationConfig.PersistentDirectory]): + Directories to persist across workstation + sessions. + container (google.cloud.workstations_v1.types.WorkstationConfig.Container): + Container that will be run for each + workstation using this configuration when that + workstation is started. + encryption_key (google.cloud.workstations_v1.types.WorkstationConfig.CustomerEncryptionKey): + Immutable. Encrypts resources of this + workstation configuration using a + customer-managed encryption key. + If specified, the boot disk of the Compute + Engine instance and the persistent disk are + encrypted using this encryption key. If this + field is not set, the disks are encrypted using + a generated key. Customer-managed encryption + keys do not protect disk metadata. + If the customer-managed encryption key is + rotated, when the workstation instance is + stopped, the system attempts to recreate the + persistent disk with the new version of the key. + Be sure to keep older versions of the key until + the persistent disk is recreated. Otherwise, + data on the persistent disk will be lost. + If the encryption key is revoked, the + workstation session will automatically be + stopped within 7 hours. + + Immutable after the workstation configuration is + created. + degraded (bool): + Output only. Whether this resource is degraded, in which + case it may require user action to restore full + functionality. See also the ``conditions`` field. + conditions (MutableSequence[google.rpc.status_pb2.Status]): + Output only. Status conditions describing the + current resource state. + """ + + class Host(proto.Message): + r"""Runtime host for a workstation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gce_instance (google.cloud.workstations_v1.types.WorkstationConfig.Host.GceInstance): + Specifies a Compute Engine instance as the + host. + + This field is a member of `oneof`_ ``config``. + """ + + class GceInstance(proto.Message): + r"""A runtime using a Compute Engine instance. + + Attributes: + machine_type (str): + The name of a Compute Engine machine type. + service_account (str): + Email address of the service account used on + VM instances used to support this configuration. + If not set, VMs run with a Google-managed + service account. This service account must have + permission to pull the specified container + image; otherwise, the image must be publicly + accessible. + tags (MutableSequence[str]): + Network tags to add to the Compute Engine + machines backing the Workstations. + pool_size (int): + Number of instances to pool for faster + workstation startup. + pooled_instances (int): + Output only. Number of instances currently + available in the pool for faster workstation + startup. + disable_public_ip_addresses (bool): + Whether instances have no public IP address. + enable_nested_virtualization (bool): + Whether to enable nested virtualization on + instances. + shielded_instance_config (google.cloud.workstations_v1.types.WorkstationConfig.Host.GceInstance.GceShieldedInstanceConfig): + A set of Compute Engine Shielded instance + options. 
+ confidential_instance_config (google.cloud.workstations_v1.types.WorkstationConfig.Host.GceInstance.GceConfidentialInstanceConfig): + A set of Compute Engine Confidential VM + instance options. + boot_disk_size_gb (int): + Size of the boot disk in GB. Defaults to 50. + """ + + class GceShieldedInstanceConfig(proto.Message): + r"""A set of Compute Engine Shielded instance options. + + Attributes: + enable_secure_boot (bool): + Whether the instance has Secure Boot enabled. + enable_vtpm (bool): + Whether the instance has the vTPM enabled. + enable_integrity_monitoring (bool): + Whether the instance has integrity monitoring + enabled. + """ + + enable_secure_boot: bool = proto.Field( + proto.BOOL, + number=1, + ) + enable_vtpm: bool = proto.Field( + proto.BOOL, + number=2, + ) + enable_integrity_monitoring: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class GceConfidentialInstanceConfig(proto.Message): + r"""A set of Compute Engine Confidential VM instance options. + + Attributes: + enable_confidential_compute (bool): + Whether the instance has confidential compute + enabled. + """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=1, + ) + + machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + service_account: str = proto.Field( + proto.STRING, + number=2, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + pool_size: int = proto.Field( + proto.INT32, + number=5, + ) + pooled_instances: int = proto.Field( + proto.INT32, + number=12, + ) + disable_public_ip_addresses: bool = proto.Field( + proto.BOOL, + number=6, + ) + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=7, + ) + shielded_instance_config: "WorkstationConfig.Host.GceInstance.GceShieldedInstanceConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="WorkstationConfig.Host.GceInstance.GceShieldedInstanceConfig", + ) + confidential_instance_config: "WorkstationConfig.Host.GceInstance.GceConfidentialInstanceConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="WorkstationConfig.Host.GceInstance.GceConfidentialInstanceConfig", + ) + boot_disk_size_gb: int = proto.Field( + proto.INT32, + number=9, + ) + + gce_instance: "WorkstationConfig.Host.GceInstance" = proto.Field( + proto.MESSAGE, + number=1, + oneof="config", + message="WorkstationConfig.Host.GceInstance", + ) + + class PersistentDirectory(proto.Message): + r"""A directory to persist across workstation sessions. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gce_pd (google.cloud.workstations_v1.types.WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk): + A PersistentDirectory backed by a Compute + Engine persistent disk. + + This field is a member of `oneof`_ ``directory_type``. + mount_path (str): + Location of this directory in the running + workstation. + """ + + class GceRegionalPersistentDisk(proto.Message): + r"""A PersistentDirectory backed by a Compute Engine regional + persistent disk. + + Attributes: + size_gb (int): + Size of the disk in GB. Must be empty if source_snapshot is + set. Defaults to 200. + fs_type (str): + Type of file system that the disk should be formatted with. + The workstation image must support this file system type. + Must be empty if source_snapshot is set. Defaults to ext4. + disk_type (str): + Type of the disk to use. Defaults to + pd-standard. + source_snapshot (str): + Name of the snapshot to use as the source for the disk. 
If
+                    set, size_gb and fs_type must be empty.
+                reclaim_policy (google.cloud.workstations_v1.types.WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk.ReclaimPolicy):
+                    What should happen to the disk after the
+                    workstation is deleted. Defaults to DELETE.
+            """
+
+            class ReclaimPolicy(proto.Enum):
+                r"""Value representing what should happen to the disk after the
+                workstation is deleted.
+
+                Values:
+                    RECLAIM_POLICY_UNSPECIFIED (0):
+                        Do not use.
+                    DELETE (1):
+                        The persistent disk will be deleted with the
+                        workstation.
+                    RETAIN (2):
+                        The persistent disk will remain after the
+                        workstation is deleted, and the administrator
+                        must manually delete the disk.
+                """
+                RECLAIM_POLICY_UNSPECIFIED = 0
+                DELETE = 1
+                RETAIN = 2
+
+            size_gb: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            fs_type: str = proto.Field(
+                proto.STRING,
+                number=2,
+            )
+            disk_type: str = proto.Field(
+                proto.STRING,
+                number=3,
+            )
+            source_snapshot: str = proto.Field(
+                proto.STRING,
+                number=5,
+            )
+            reclaim_policy: "WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk.ReclaimPolicy" = proto.Field(
+                proto.ENUM,
+                number=4,
+                enum="WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk.ReclaimPolicy",
+            )
+
+        gce_pd: "WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk" = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            oneof="directory_type",
+            message="WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk",
+        )
+        mount_path: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+
+    class Container(proto.Message):
+        r"""A Docker container.
+
+        Attributes:
+            image (str):
+                Docker image defining the container. This
+                image must be accessible by the service account
+                specified in the workstation configuration.
+            command (MutableSequence[str]):
+                If set, overrides the default ENTRYPOINT
+                specified by the image.
+            args (MutableSequence[str]):
+                Arguments passed to the entrypoint.
+            env (MutableMapping[str, str]):
+                Environment variables passed to the
+                container's entrypoint.
+            working_dir (str):
+                If set, overrides the default DIR specified
+                by the image.
+            run_as_user (int):
+                If set, overrides the USER specified in the
+                image with the given uid.
+        """
+
+        image: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        command: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=2,
+        )
+        args: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=3,
+        )
+        env: MutableMapping[str, str] = proto.MapField(
+            proto.STRING,
+            proto.STRING,
+            number=4,
+        )
+        working_dir: str = proto.Field(
+            proto.STRING,
+            number=5,
+        )
+        run_as_user: int = proto.Field(
+            proto.INT32,
+            number=6,
+        )
+
+    class CustomerEncryptionKey(proto.Message):
+        r"""A customer-managed encryption key for the Compute Engine
+        resources of this workstation configuration.
+
+        Attributes:
+            kms_key (str):
+                Immutable. The name of the Google Cloud KMS encryption key.
+                For example,
+                ``projects/PROJECT_ID/locations/REGION/keyRings/KEY_RING/cryptoKeys/KEY_NAME``.
+            kms_key_service_account (str):
+                Immutable. The service account to use with the specified KMS
+                key. We recommend that you use a separate service account
+                and follow KMS best practices. For more information, see
+                `Separation of
+                duties `__
+                and ``gcloud kms keys add-iam-policy-binding``
+                ```--member`` `__.
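+
+        Example (an illustrative sketch only; the key and service
+        account names below are hypothetical)::
+
+            from google.cloud import workstations_v1
+
+            encryption_key = workstations_v1.WorkstationConfig.CustomerEncryptionKey(
+                kms_key="projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
+                kms_key_service_account="kms-user@my-project.iam.gserviceaccount.com",
+            )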
+ """ + + kms_key: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key_service_account: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + idle_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + message=duration_pb2.Duration, + ) + running_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=11, + message=duration_pb2.Duration, + ) + host: Host = proto.Field( + proto.MESSAGE, + number=12, + message=Host, + ) + persistent_directories: MutableSequence[PersistentDirectory] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message=PersistentDirectory, + ) + container: Container = proto.Field( + proto.MESSAGE, + number=14, + message=Container, + ) + encryption_key: CustomerEncryptionKey = proto.Field( + proto.MESSAGE, + number=17, + message=CustomerEncryptionKey, + ) + degraded: bool = proto.Field( + proto.BOOL, + number=15, + ) + conditions: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message=status_pb2.Status, + ) + + +class Workstation(proto.Message): + r"""A single instance of a developer workstation with its own + persistent storage. + + Attributes: + name (str): + Full name of this resource. + display_name (str): + Human-readable name for this resource. + uid (str): + Output only. A system-assigned unique + identified for this resource. + reconciling (bool): + Output only. Indicates whether this resource + is currently being updated to match its intended + state. + annotations (MutableMapping[str, str]): + Client-specified annotations. + labels (MutableMapping[str, str]): + Client-specified labels that are applied to + the resource and that are also propagated to the + underlying Compute Engine resources. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was most + recently updated. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + soft-deleted. + etag (str): + Checksum computed by the server. May be sent + on update and delete requests to make sure that + the client has an up-to-date value before + proceeding. + state (google.cloud.workstations_v1.types.Workstation.State): + Output only. Current state of the + workstation. + host (str): + Output only. Host to which clients can send HTTPS traffic + that will be received by the workstation. Authorized traffic + will be received to the workstation as HTTP on port 80. 
+            send traffic to a different port, clients may prefix the
+            host with the destination port in the format
+            ``{port}-{host}``.
+    """
+
+    class State(proto.Enum):
+        r"""Whether a workstation is running and ready to receive user
+        requests.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Do not use.
+            STATE_STARTING (1):
+                The workstation is not yet ready to accept
+                requests from users but will be soon.
+            STATE_RUNNING (2):
+                The workstation is ready to accept requests
+                from users.
+            STATE_STOPPING (3):
+                The workstation is being stopped.
+            STATE_STOPPED (4):
+                The workstation is stopped and will not be
+                able to receive requests until it is started.
+        """
+        STATE_UNSPECIFIED = 0
+        STATE_STARTING = 1
+        STATE_RUNNING = 2
+        STATE_STOPPING = 3
+        STATE_STOPPED = 4
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    uid: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    reconciling: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    annotations: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=5,
+    )
+    labels: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=13,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=timestamp_pb2.Timestamp,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=timestamp_pb2.Timestamp,
+    )
+    delete_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=timestamp_pb2.Timestamp,
+    )
+    etag: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=10,
+        enum=State,
+    )
+    host: str = proto.Field(
+        proto.STRING,
+        number=11,
+    )
+
+
+class GetWorkstationClusterRequest(proto.Message):
+    r"""Request message for GetWorkstationCluster.
+
+    Attributes:
+        name (str):
+            Required. Name of the requested resource.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListWorkstationClustersRequest(proto.Message):
+    r"""Request message for ListWorkstationClusters.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+        page_size (int):
+            Maximum number of items to return.
+        page_token (str):
+            next_page_token value returned from a previous List request,
+            if any.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListWorkstationClustersResponse(proto.Message):
+    r"""Response message for ListWorkstationClusters.
+
+    Attributes:
+        workstation_clusters (MutableSequence[google.cloud.workstations_v1.types.WorkstationCluster]):
+            The requested workstation clusters.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+        unreachable (MutableSequence[str]):
+            Unreachable resources.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    workstation_clusters: MutableSequence["WorkstationCluster"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="WorkstationCluster",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class CreateWorkstationClusterRequest(proto.Message):
+    r"""Request message for CreateWorkstationCluster.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+        workstation_cluster_id (str):
+            Required. ID to use for the workstation
+            cluster.
+        workstation_cluster (google.cloud.workstations_v1.types.WorkstationCluster):
+            Required. Workstation cluster to create.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    workstation_cluster_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    workstation_cluster: "WorkstationCluster" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="WorkstationCluster",
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class UpdateWorkstationClusterRequest(proto.Message):
+    r"""Request message for UpdateWorkstationCluster.
+
+    Attributes:
+        workstation_cluster (google.cloud.workstations_v1.types.WorkstationCluster):
+            Required. Workstation cluster to update.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. Mask that specifies which fields in
+            the workstation cluster should be updated.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+        allow_missing (bool):
+            If set, and the workstation cluster is not found, a new
+            workstation cluster will be created. In this situation,
+            update_mask is ignored.
+    """
+
+    workstation_cluster: "WorkstationCluster" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="WorkstationCluster",
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+    allow_missing: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class DeleteWorkstationClusterRequest(proto.Message):
+    r"""Message for deleting a workstation cluster.
+
+    Attributes:
+        name (str):
+            Required. Name of the workstation cluster to
+            delete.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not apply it.
+        etag (str):
+            If set, the request will be rejected if the
+            latest version of the workstation cluster on the
+            server does not have this ETag.
+        force (bool):
+            If set, any workstation configurations and
+            workstations in the workstation cluster are also
+            deleted. Otherwise, the request only works if
+            the workstation cluster has no configurations or
+            workstations.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    etag: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    force: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class GetWorkstationConfigRequest(proto.Message):
+    r"""Request message for GetWorkstationConfig.
+
+    Attributes:
+        name (str):
+            Required. Name of the requested resource.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListWorkstationConfigsRequest(proto.Message):
+    r"""Request message for ListWorkstationConfigs.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+        page_size (int):
+            Maximum number of items to return.
+        page_token (str):
+            next_page_token value returned from a previous List request,
+            if any.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListWorkstationConfigsResponse(proto.Message):
+    r"""Response message for ListWorkstationConfigs.
+
+    Attributes:
+        workstation_configs (MutableSequence[google.cloud.workstations_v1.types.WorkstationConfig]):
+            The requested configs.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+        unreachable (MutableSequence[str]):
+            Unreachable resources.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    workstation_configs: MutableSequence["WorkstationConfig"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="WorkstationConfig",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListUsableWorkstationConfigsRequest(proto.Message):
+    r"""Request message for ListUsableWorkstationConfigs.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+        page_size (int):
+            Maximum number of items to return.
+        page_token (str):
+            next_page_token value returned from a previous List request,
+            if any.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListUsableWorkstationConfigsResponse(proto.Message):
+    r"""Response message for ListUsableWorkstationConfigs.
+
+    Attributes:
+        workstation_configs (MutableSequence[google.cloud.workstations_v1.types.WorkstationConfig]):
+            The requested configs.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+        unreachable (MutableSequence[str]):
+            Unreachable resources.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    workstation_configs: MutableSequence["WorkstationConfig"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="WorkstationConfig",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class CreateWorkstationConfigRequest(proto.Message):
+    r"""Request message for CreateWorkstationConfig.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+        workstation_config_id (str):
+            Required. ID to use for the workstation
+            configuration.
+        workstation_config (google.cloud.workstations_v1.types.WorkstationConfig):
+            Required. Config to create.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    workstation_config_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    workstation_config: "WorkstationConfig" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="WorkstationConfig",
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class UpdateWorkstationConfigRequest(proto.Message):
+    r"""Request message for UpdateWorkstationConfig.
+
+    Attributes:
+        workstation_config (google.cloud.workstations_v1.types.WorkstationConfig):
+            Required. Config to update.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. Mask specifying which fields in the
+            workstation configuration should be updated.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+        allow_missing (bool):
+            If set and the workstation configuration is not found, a new
+            workstation configuration will be created. In this
+            situation, update_mask is ignored.
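+
+    Example (an illustrative sketch only; the resource name below is
+    hypothetical)::
+
+        from google.protobuf import duration_pb2, field_mask_pb2
+
+        from google.cloud import workstations_v1
+
+        request = workstations_v1.UpdateWorkstationConfigRequest(
+            workstation_config=workstations_v1.WorkstationConfig(
+                name="projects/my-project/locations/us-central1/workstationClusters/my-cluster/workstationConfigs/my-config",
+                idle_timeout=duration_pb2.Duration(seconds=3600),
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["idle_timeout"]),
+        )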
+ """ + + workstation_config: "WorkstationConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="WorkstationConfig", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteWorkstationConfigRequest(proto.Message): + r"""Message for deleting a workstation configuration. + + Attributes: + name (str): + Required. Name of the workstation + configuration to delete. + validate_only (bool): + If set, validate the request and preview the + review, but do not actually apply it. + etag (str): + If set, the request is rejected if the latest + version of the workstation configuration on the + server does not have this ETag. + force (bool): + If set, any workstations in the workstation + configuration are also deleted. Otherwise, the + request works only if the workstation + configuration has no workstations. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + force: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetWorkstationRequest(proto.Message): + r"""Request message for GetWorkstation. + + Attributes: + name (str): + Required. Name of the requested resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListWorkstationsRequest(proto.Message): + r"""Request message for ListWorkstations. + + Attributes: + parent (str): + Required. Parent resource name. + page_size (int): + Maximum number of items to return. + page_token (str): + next_page_token value returned from a previous List request, + if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListWorkstationsResponse(proto.Message): + r"""Response message for ListWorkstations. + + Attributes: + workstations (MutableSequence[google.cloud.workstations_v1.types.Workstation]): + The requested workstations. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Unreachable resources. + """ + + @property + def raw_page(self): + return self + + workstations: MutableSequence["Workstation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Workstation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListUsableWorkstationsRequest(proto.Message): + r"""Request message for ListUsableWorkstations. + + Attributes: + parent (str): + Required. Parent resource name. + page_size (int): + Maximum number of items to return. + page_token (str): + next_page_token value returned from a previous List request, + if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListUsableWorkstationsResponse(proto.Message): + r"""Response message for ListUsableWorkstations. + + Attributes: + workstations (MutableSequence[google.cloud.workstations_v1.types.Workstation]): + The requested workstations. 
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+        unreachable (MutableSequence[str]):
+            Unreachable resources.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    workstations: MutableSequence["Workstation"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Workstation",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class CreateWorkstationRequest(proto.Message):
+    r"""Request message for CreateWorkstation.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+        workstation_id (str):
+            Required. ID to use for the workstation.
+        workstation (google.cloud.workstations_v1.types.Workstation):
+            Required. Workstation to create.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    workstation_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    workstation: "Workstation" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="Workstation",
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class UpdateWorkstationRequest(proto.Message):
+    r"""Request message for UpdateWorkstation.
+
+    Attributes:
+        workstation (google.cloud.workstations_v1.types.Workstation):
+            Required. Workstation to update.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. Mask specifying which fields in the
+            workstation should be updated.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+        allow_missing (bool):
+            If set and the workstation is not found, a new
+            workstation is created. In this situation,
+            update_mask is ignored.
+    """
+
+    workstation: "Workstation" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="Workstation",
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+    allow_missing: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class DeleteWorkstationRequest(proto.Message):
+    r"""Request message for DeleteWorkstation.
+
+    Attributes:
+        name (str):
+            Required. Name of the workstation to delete.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+        etag (str):
+            If set, the request will be rejected if the
+            latest version of the workstation on the server
+            does not have this ETag.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    etag: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class StartWorkstationRequest(proto.Message):
+    r"""Request message for StartWorkstation.
+
+    Attributes:
+        name (str):
+            Required. Name of the workstation to start.
+        validate_only (bool):
+            If set, validate the request and preview the
+            response, but do not actually apply it.
+        etag (str):
+            If set, the request will be rejected if the
+            latest version of the workstation on the server
+            does not have this ETag.
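+
+    Example (an illustrative sketch only; the resource name below is
+    hypothetical)::
+
+        from google.cloud import workstations_v1
+
+        request = workstations_v1.StartWorkstationRequest(
+            name="projects/my-project/locations/us-central1/workstationClusters/my-cluster/workstationConfigs/my-config/workstations/my-workstation",
+        )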
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StopWorkstationRequest(proto.Message): + r"""Request message for StopWorkstation. + + Attributes: + name (str): + Required. Name of the workstation to stop. + validate_only (bool): + If set, validate the request and preview the + review, but do not actually apply it. + etag (str): + If set, the request will be rejected if the + latest version of the workstation on the server + does not have this ETag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GenerateAccessTokenRequest(proto.Message): + r"""Request message for GenerateAccessToken. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Desired expiration time of the access token. + This value must be at most 24 hours in the + future. If a value is not specified, the token's + expiration time will be set to a default value + of 1 hour in the future. + + This field is a member of `oneof`_ ``expiration``. + ttl (google.protobuf.duration_pb2.Duration): + Desired lifetime duration of the access + token. This value must be at most 24 hours. If a + value is not specified, the token's lifetime + will be set to a default value of 1 hour. + + This field is a member of `oneof`_ ``expiration``. + workstation (str): + Required. Name of the workstation for which + the access token should be generated. + """ + + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof="expiration", + message=timestamp_pb2.Timestamp, + ) + ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + oneof="expiration", + message=duration_pb2.Duration, + ) + workstation: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GenerateAccessTokenResponse(proto.Message): + r"""Response message for GenerateAccessToken. + + Attributes: + access_token (str): + The generated bearer access token. To use this token, + include it in an Authorization header of an HTTP request + sent to the associated workstation's hostname—for example, + ``Authorization: Bearer ``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Time at which the generated token will + expire. + """ + + access_token: str = proto.Field( + proto.STRING, + number=1, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class OperationMetadata(proto.Message): + r"""Metadata for long-running operations. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time that the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time that the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. 
Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has + requested cancellation of the operation. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/__init__.py new file mode 100644 index 000000000000..e8dd5402a127 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
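A note on the ``expiration`` oneof defined on ``GenerateAccessTokenRequest`` above: proto-plus enforces the mutual exclusion at assignment time, so setting one member clears the other. A minimal sketch of that behavior (editorial illustration, not part of the diff; the workstation name is a placeholder):

```python
from google.protobuf import duration_pb2, timestamp_pb2

from google.cloud import workstations_v1

# Request a token that lives for one hour.
request = workstations_v1.GenerateAccessTokenRequest(
    workstation="projects/my-project/locations/us-central1"
    "/workstationClusters/my-cluster/workstationConfigs/my-config"
    "/workstations/my-workstation",
    ttl=duration_pb2.Duration(seconds=3600),
)

# Assigning the other member of the ``expiration`` oneof clears ``ttl``.
request.expire_time = timestamp_pb2.Timestamp(seconds=1893456000)
```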
+# +from google.cloud.workstations_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.workstations import WorkstationsAsyncClient, WorkstationsClient +from .types.workstations import ( + CreateWorkstationClusterRequest, + CreateWorkstationConfigRequest, + CreateWorkstationRequest, + DeleteWorkstationClusterRequest, + DeleteWorkstationConfigRequest, + DeleteWorkstationRequest, + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GetWorkstationClusterRequest, + GetWorkstationConfigRequest, + GetWorkstationRequest, + ListUsableWorkstationConfigsRequest, + ListUsableWorkstationConfigsResponse, + ListUsableWorkstationsRequest, + ListUsableWorkstationsResponse, + ListWorkstationClustersRequest, + ListWorkstationClustersResponse, + ListWorkstationConfigsRequest, + ListWorkstationConfigsResponse, + ListWorkstationsRequest, + ListWorkstationsResponse, + OperationMetadata, + StartWorkstationRequest, + StopWorkstationRequest, + UpdateWorkstationClusterRequest, + UpdateWorkstationConfigRequest, + UpdateWorkstationRequest, + Workstation, + WorkstationCluster, + WorkstationConfig, +) + +__all__ = ( + "WorkstationsAsyncClient", + "CreateWorkstationClusterRequest", + "CreateWorkstationConfigRequest", + "CreateWorkstationRequest", + "DeleteWorkstationClusterRequest", + "DeleteWorkstationConfigRequest", + "DeleteWorkstationRequest", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GetWorkstationClusterRequest", + "GetWorkstationConfigRequest", + "GetWorkstationRequest", + "ListUsableWorkstationConfigsRequest", + "ListUsableWorkstationConfigsResponse", + "ListUsableWorkstationsRequest", + "ListUsableWorkstationsResponse", + "ListWorkstationClustersRequest", + "ListWorkstationClustersResponse", + "ListWorkstationConfigsRequest", + "ListWorkstationConfigsResponse", + "ListWorkstationsRequest", + "ListWorkstationsResponse", + "OperationMetadata", + "StartWorkstationRequest", + "StopWorkstationRequest", + "UpdateWorkstationClusterRequest", + "UpdateWorkstationConfigRequest", + "UpdateWorkstationRequest", + "Workstation", + "WorkstationCluster", + "WorkstationConfig", + "WorkstationsClient", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_metadata.json b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..cf8a80968ec1 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_metadata.json @@ -0,0 +1,328 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.workstations_v1beta", + "protoPackage": "google.cloud.workstations.v1beta", + "schema": "1.0", + "services": { + "Workstations": { + "clients": { + "grpc": { + "libraryClient": "WorkstationsClient", + "rpcs": { + "CreateWorkstation": { + "methods": [ + "create_workstation" + ] + }, + "CreateWorkstationCluster": { + "methods": [ + "create_workstation_cluster" + ] + }, + "CreateWorkstationConfig": { + "methods": [ + "create_workstation_config" + ] + }, + "DeleteWorkstation": { + "methods": [ + "delete_workstation" + ] + }, + "DeleteWorkstationCluster": { + "methods": [ + "delete_workstation_cluster" + ] + }, + "DeleteWorkstationConfig": { + "methods": [ + "delete_workstation_config" + ] + }, + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GetWorkstation": { + "methods": [ + 
"get_workstation" + ] + }, + "GetWorkstationCluster": { + "methods": [ + "get_workstation_cluster" + ] + }, + "GetWorkstationConfig": { + "methods": [ + "get_workstation_config" + ] + }, + "ListUsableWorkstationConfigs": { + "methods": [ + "list_usable_workstation_configs" + ] + }, + "ListUsableWorkstations": { + "methods": [ + "list_usable_workstations" + ] + }, + "ListWorkstationClusters": { + "methods": [ + "list_workstation_clusters" + ] + }, + "ListWorkstationConfigs": { + "methods": [ + "list_workstation_configs" + ] + }, + "ListWorkstations": { + "methods": [ + "list_workstations" + ] + }, + "StartWorkstation": { + "methods": [ + "start_workstation" + ] + }, + "StopWorkstation": { + "methods": [ + "stop_workstation" + ] + }, + "UpdateWorkstation": { + "methods": [ + "update_workstation" + ] + }, + "UpdateWorkstationCluster": { + "methods": [ + "update_workstation_cluster" + ] + }, + "UpdateWorkstationConfig": { + "methods": [ + "update_workstation_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "WorkstationsAsyncClient", + "rpcs": { + "CreateWorkstation": { + "methods": [ + "create_workstation" + ] + }, + "CreateWorkstationCluster": { + "methods": [ + "create_workstation_cluster" + ] + }, + "CreateWorkstationConfig": { + "methods": [ + "create_workstation_config" + ] + }, + "DeleteWorkstation": { + "methods": [ + "delete_workstation" + ] + }, + "DeleteWorkstationCluster": { + "methods": [ + "delete_workstation_cluster" + ] + }, + "DeleteWorkstationConfig": { + "methods": [ + "delete_workstation_config" + ] + }, + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GetWorkstation": { + "methods": [ + "get_workstation" + ] + }, + "GetWorkstationCluster": { + "methods": [ + "get_workstation_cluster" + ] + }, + "GetWorkstationConfig": { + "methods": [ + "get_workstation_config" + ] + }, + "ListUsableWorkstationConfigs": { + "methods": [ + "list_usable_workstation_configs" + ] + }, + "ListUsableWorkstations": { + "methods": [ + "list_usable_workstations" + ] + }, + "ListWorkstationClusters": { + "methods": [ + "list_workstation_clusters" + ] + }, + "ListWorkstationConfigs": { + "methods": [ + "list_workstation_configs" + ] + }, + "ListWorkstations": { + "methods": [ + "list_workstations" + ] + }, + "StartWorkstation": { + "methods": [ + "start_workstation" + ] + }, + "StopWorkstation": { + "methods": [ + "stop_workstation" + ] + }, + "UpdateWorkstation": { + "methods": [ + "update_workstation" + ] + }, + "UpdateWorkstationCluster": { + "methods": [ + "update_workstation_cluster" + ] + }, + "UpdateWorkstationConfig": { + "methods": [ + "update_workstation_config" + ] + } + } + }, + "rest": { + "libraryClient": "WorkstationsClient", + "rpcs": { + "CreateWorkstation": { + "methods": [ + "create_workstation" + ] + }, + "CreateWorkstationCluster": { + "methods": [ + "create_workstation_cluster" + ] + }, + "CreateWorkstationConfig": { + "methods": [ + "create_workstation_config" + ] + }, + "DeleteWorkstation": { + "methods": [ + "delete_workstation" + ] + }, + "DeleteWorkstationCluster": { + "methods": [ + "delete_workstation_cluster" + ] + }, + "DeleteWorkstationConfig": { + "methods": [ + "delete_workstation_config" + ] + }, + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GetWorkstation": { + "methods": [ + "get_workstation" + ] + }, + "GetWorkstationCluster": { + "methods": [ + "get_workstation_cluster" + ] + }, + "GetWorkstationConfig": { + "methods": [ + "get_workstation_config" + ] + }, + 
"ListUsableWorkstationConfigs": { + "methods": [ + "list_usable_workstation_configs" + ] + }, + "ListUsableWorkstations": { + "methods": [ + "list_usable_workstations" + ] + }, + "ListWorkstationClusters": { + "methods": [ + "list_workstation_clusters" + ] + }, + "ListWorkstationConfigs": { + "methods": [ + "list_workstation_configs" + ] + }, + "ListWorkstations": { + "methods": [ + "list_workstations" + ] + }, + "StartWorkstation": { + "methods": [ + "start_workstation" + ] + }, + "StopWorkstation": { + "methods": [ + "stop_workstation" + ] + }, + "UpdateWorkstation": { + "methods": [ + "update_workstation" + ] + }, + "UpdateWorkstationCluster": { + "methods": [ + "update_workstation_cluster" + ] + }, + "UpdateWorkstationConfig": { + "methods": [ + "update_workstation_config" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_version.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_version.py new file mode 100644 index 000000000000..c2ac4ad75f9b --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/py.typed b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/py.typed new file mode 100644 index 000000000000..04170223dff9 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-workstations package uses inline types. diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/__init__.py new file mode 100644 index 000000000000..2d2ad17879bd --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import WorkstationsAsyncClient +from .client import WorkstationsClient + +__all__ = ( + "WorkstationsClient", + "WorkstationsAsyncClient", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/async_client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/async_client.py new file mode 100644 index 000000000000..43f845ab4e1c --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/async_client.py @@ -0,0 +1,3227 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
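``services/workstations/__init__.py`` above exposes the two entry points, ``WorkstationsClient`` and ``WorkstationsAsyncClient``; the async client defined in ``async_client.py`` below mirrors the sync surface method for method, returning awaitables (and async pagers for list methods). A usage sketch (editorial, with placeholder resource names):

```python
import asyncio

from google.cloud import workstations_v1beta


async def main() -> None:
    client = workstations_v1beta.WorkstationsAsyncClient()
    cluster = await client.get_workstation_cluster(
        name="projects/my-project/locations/us-central1"
        "/workstationClusters/my-cluster"
    )
    print(cluster.name)


asyncio.run(main())
```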
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workstations_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.workstations_v1beta.services.workstations import pagers +from google.cloud.workstations_v1beta.types import workstations + +from .client import WorkstationsClient +from .transports.base import DEFAULT_CLIENT_INFO, WorkstationsTransport +from .transports.grpc_asyncio import WorkstationsGrpcAsyncIOTransport + + +class WorkstationsAsyncClient: + """Service for interacting with Cloud Workstations.""" + + _client: WorkstationsClient + + DEFAULT_ENDPOINT = WorkstationsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = WorkstationsClient.DEFAULT_MTLS_ENDPOINT + + workstation_path = staticmethod(WorkstationsClient.workstation_path) + parse_workstation_path = staticmethod(WorkstationsClient.parse_workstation_path) + workstation_cluster_path = staticmethod(WorkstationsClient.workstation_cluster_path) + parse_workstation_cluster_path = staticmethod( + WorkstationsClient.parse_workstation_cluster_path + ) + workstation_config_path = staticmethod(WorkstationsClient.workstation_config_path) + parse_workstation_config_path = staticmethod( + WorkstationsClient.parse_workstation_config_path + ) + common_billing_account_path = staticmethod( + WorkstationsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + WorkstationsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(WorkstationsClient.common_folder_path) + parse_common_folder_path = staticmethod(WorkstationsClient.parse_common_folder_path) + common_organization_path = staticmethod(WorkstationsClient.common_organization_path) + parse_common_organization_path = staticmethod( + WorkstationsClient.parse_common_organization_path + ) + common_project_path = staticmethod(WorkstationsClient.common_project_path) + parse_common_project_path = staticmethod( + WorkstationsClient.parse_common_project_path + ) + common_location_path = staticmethod(WorkstationsClient.common_location_path) + parse_common_location_path = staticmethod( + WorkstationsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            WorkstationsAsyncClient: The constructed client.
+        """
+        return WorkstationsClient.from_service_account_info.__func__(WorkstationsAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            WorkstationsAsyncClient: The constructed client.
+        """
+        return WorkstationsClient.from_service_account_file.__func__(WorkstationsAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return WorkstationsClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> WorkstationsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            WorkstationsTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(WorkstationsClient).get_transport_class, type(WorkstationsClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, WorkstationsTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the workstations client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+ transport (Union[str, ~.WorkstationsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = WorkstationsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_workstation_cluster( + self, + request: Optional[ + Union[workstations.GetWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationCluster: + r"""Returns the requested workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.GetWorkstationClusterRequest, dict]]): + The request object. Request message for + GetWorkstationCluster. + name (:class:`str`): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.WorkstationCluster: + A grouping of workstation + configurations and the associated + workstations in that region. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GetWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workstation_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workstation_clusters( + self, + request: Optional[ + Union[workstations.ListWorkstationClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationClustersAsyncPager: + r"""Returns all workstation clusters in the specified + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_list_workstation_clusters(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.ListWorkstationClustersRequest, dict]]): + The request object. Request message for + ListWorkstationClusters. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationClustersAsyncPager: + Response message for + ListWorkstationClusters. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.ListWorkstationClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workstation_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkstationClustersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_workstation_cluster( + self, + request: Optional[ + Union[workstations.CreateWorkstationClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + workstation_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.CreateWorkstationClusterRequest, dict]]): + The request object. Message for creating a + CreateWorkstationCluster. + parent (:class:`str`): + Required. 
Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster (:class:`google.cloud.workstations_v1beta.types.WorkstationCluster`): + Required. Workstation cluster to + create. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster_id (:class:`str`): + Required. ID to use for the + workstation cluster. + + This corresponds to the ``workstation_cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, workstation_cluster, workstation_cluster_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.CreateWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if workstation_cluster_id is not None: + request.workstation_cluster_id = workstation_cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workstation_cluster, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_workstation_cluster( + self, + request: Optional[ + Union[workstations.UpdateWorkstationClusterRequest, dict] + ] = None, + *, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing workstation cluster. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_update_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationClusterRequest( + ) + + # Make the request + operation = client.update_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.UpdateWorkstationClusterRequest, dict]]): + The request object. Request message for + UpdateWorkstationCluster. + workstation_cluster (:class:`google.cloud.workstations_v1beta.types.WorkstationCluster`): + Required. Workstation cluster to + update. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask that specifies which + fields in the workstation cluster should + be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation_cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.UpdateWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_workstation_cluster, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation_cluster.name", request.workstation_cluster.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_workstation_cluster( + self, + request: Optional[ + Union[workstations.DeleteWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.DeleteWorkstationClusterRequest, dict]]): + The request object. Message for deleting a workstation + cluster. + name (:class:`str`): + Required. Name of the workstation + cluster to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.DeleteWorkstationClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workstation_cluster, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_workstation_config( + self, + request: Optional[Union[workstations.GetWorkstationConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationConfig: + r"""Returns the requested workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_get_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.GetWorkstationConfigRequest, dict]]): + The request object. Request message for + GetWorkstationConfig. + name (:class:`str`): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.WorkstationConfig: + A set of configuration options + describing how a workstation will be + run. Workstation configurations are + intended to be shared across multiple + workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GetWorkstationConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workstation_config, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workstation_configs( + self, + request: Optional[ + Union[workstations.ListWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationConfigsAsyncPager: + r"""Returns all workstation configurations in the + specified cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_list_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.ListWorkstationConfigsRequest, dict]]): + The request object. Request message for + ListWorkstationConfigs. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationConfigsAsyncPager: + Response message for + ListWorkstationConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = workstations.ListWorkstationConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkstationConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_usable_workstation_configs( + self, + request: Optional[ + Union[workstations.ListUsableWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableWorkstationConfigsAsyncPager: + r"""Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_list_usable_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListUsableWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstation_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsRequest, dict]]): + The request object. Request message for + ListUsableWorkstationConfigs. + parent (:class:`str`): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationConfigsAsyncPager: + Response message for + ListUsableWorkstationConfigs. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.ListUsableWorkstationConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_usable_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUsableWorkstationConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_workstation_config( + self, + request: Optional[ + Union[workstations.CreateWorkstationConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + workstation_config: Optional[workstations.WorkstationConfig] = None, + workstation_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_create_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationConfigRequest( + parent="parent_value", + workstation_config_id="workstation_config_id_value", + ) + + # Make the request + operation = client.create_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.CreateWorkstationConfigRequest, dict]]): + The request object. Message for creating a + CreateWorkstationConfig. + parent (:class:`str`): + Required. 
Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_config (:class:`google.cloud.workstations_v1beta.types.WorkstationConfig`): + Required. Config to create. + This corresponds to the ``workstation_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_config_id (:class:`str`): + Required. ID to use for the + workstation configuration. + + This corresponds to the ``workstation_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation_config, workstation_config_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.CreateWorkstationConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_config is not None: + request.workstation_config = workstation_config + if workstation_config_id is not None: + request.workstation_config_id = workstation_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workstation_config, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_workstation_config( + self, + request: Optional[ + Union[workstations.UpdateWorkstationConfigRequest, dict] + ] = None, + *, + workstation_config: Optional[workstations.WorkstationConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing workstation configuration. + + .. 
code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_update_workstation_config():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.UpdateWorkstationConfigRequest(
+                )
+
+                # Make the request
+                operation = await client.update_workstation_config(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.UpdateWorkstationConfigRequest, dict]]):
+                The request object. Request message for
+                UpdateWorkstationConfig.
+            workstation_config (:class:`google.cloud.workstations_v1beta.types.WorkstationConfig`):
+                Required. Config to update.
+                This corresponds to the ``workstation_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. Mask specifying which
+                fields in the workstation configuration
+                should be updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationConfig` A set of configuration options describing how a workstation will be run.
+                Workstation configurations are intended to be shared
+                across multiple workstations.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([workstation_config, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.UpdateWorkstationConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if workstation_config is not None:
+            request.workstation_config = workstation_config
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_workstation_config,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
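+        # Illustrative aside (hypothetical resource name): to_grpc_metadata()
+        # folds the pair below into a single "x-goog-request-params" entry,
+        # e.g. ("x-goog-request-params",
+        #       "workstation_config.name=projects/p/locations/l/workstationClusters/c/workstationConfigs/cfg"),
+        # which the backend uses to route the call to the right region.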
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("workstation_config.name", request.workstation_config.name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            workstations.WorkstationConfig,
+            metadata_type=workstations.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_workstation_config(
+        self,
+        request: Optional[
+            Union[workstations.DeleteWorkstationConfigRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Deletes the specified workstation configuration.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_delete_workstation_config():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.DeleteWorkstationConfigRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = await client.delete_workstation_config(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.DeleteWorkstationConfigRequest, dict]]):
+                The request object. Message for deleting a workstation
+                configuration.
+            name (:class:`str`):
+                Required. Name of the workstation
+                configuration to delete.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationConfig` A set of configuration options describing how a workstation will be run.
+                Workstation configurations are intended to be shared
+                across multiple workstations.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.DeleteWorkstationConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
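+        # For example (illustrative name only), a caller using the flattened
+        # form:
+        #     await client.delete_workstation_config(
+        #         name="projects/p/locations/l/workstationClusters/c/workstationConfigs/cfg",
+        #     )
+        # reaches this point with a freshly constructed request object, and the
+        # `name` keyword is copied onto it below.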
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workstation_config, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_workstation( + self, + request: Optional[Union[workstations.GetWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.Workstation: + r"""Returns the requested workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_get_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.GetWorkstationRequest, dict]]): + The request object. Request message for GetWorkstation. + name (:class:`str`): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.Workstation: + A single instance of a developer + workstation with its own persistent + storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GetWorkstationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_workstation,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_workstations(
+        self,
+        request: Optional[Union[workstations.ListWorkstationsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListWorkstationsAsyncPager:
+        r"""Returns all workstations using the specified
+        workstation configuration.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_list_workstations():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.ListWorkstationsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_workstations(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.ListWorkstationsRequest, dict]]):
+                The request object. Request message for ListWorkstations.
+            parent (:class:`str`):
+                Required. Parent resource name.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationsAsyncPager:
+                Response message for
+                ListWorkstations.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.ListWorkstationsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
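+        # Note (illustrative): the constructor above is a proto-plus copy
+        # coercion -- it accepts None, a dict, or an existing
+        # ListWorkstationsRequest, so a hypothetical caller may equally pass
+        #     request={"parent": parent, "page_size": 50}
+        # and get identical behavior.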
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_workstations,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListWorkstationsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_usable_workstations(
+        self,
+        request: Optional[
+            Union[workstations.ListUsableWorkstationsRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListUsableWorkstationsAsyncPager:
+        r"""Returns all workstations using the specified
+        workstation configuration on which the caller has the
+        "workstations.workstations.use" permission.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_list_usable_workstations():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.ListUsableWorkstationsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_usable_workstations(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.ListUsableWorkstationsRequest, dict]]):
+                The request object. Request message for
+                ListUsableWorkstations.
+            parent (:class:`str`):
+                Required. Parent resource name.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationsAsyncPager:
+                Response message for
+                ListUsableWorkstations.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
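+        # Illustrative: the guard below enforces mutually exclusive calling
+        # conventions. Either
+        #     pager = await client.list_usable_workstations(request=request)
+        # or
+        #     pager = await client.list_usable_workstations(parent=parent)
+        # is accepted, but passing both raises ValueError.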
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.ListUsableWorkstationsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_usable_workstations,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+            ),
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListUsableWorkstationsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def create_workstation(
+        self,
+        request: Optional[Union[workstations.CreateWorkstationRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        workstation: Optional[workstations.Workstation] = None,
+        workstation_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Creates a new workstation.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_create_workstation():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.CreateWorkstationRequest(
+                    parent="parent_value",
+                    workstation_id="workstation_id_value",
+                )
+
+                # Make the request
+                operation = await client.create_workstation(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.CreateWorkstationRequest, dict]]):
+                The request object. Request message for creating a
+                workstation.
+            parent (:class:`str`):
+                Required. Parent resource name.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            workstation (:class:`google.cloud.workstations_v1beta.types.Workstation`):
+                Required. Workstation to create.
+ This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_id (:class:`str`): + Required. ID to use for the + workstation. + + This corresponds to the ``workstation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1beta.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation, workstation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.CreateWorkstationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation is not None: + request.workstation = workstation + if workstation_id is not None: + request.workstation_id = workstation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workstation, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_workstation( + self, + request: Optional[Union[workstations.UpdateWorkstationRequest, dict]] = None, + *, + workstation: Optional[workstations.Workstation] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_update_workstation():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.UpdateWorkstationRequest(
+                )
+
+                # Make the request
+                operation = await client.update_workstation(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.UpdateWorkstationRequest, dict]]):
+                The request object. Request message for
+                UpdateWorkstation.
+            workstation (:class:`google.cloud.workstations_v1beta.types.Workstation`):
+                Required. Workstation to update.
+                This corresponds to the ``workstation`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. Mask specifying which
+                fields in the workstation should be
+                updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.workstations_v1beta.types.Workstation`
+                A single instance of a developer workstation with its
+                own persistent storage.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([workstation, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.UpdateWorkstationRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if workstation is not None:
+            request.workstation = workstation
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_workstation,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("workstation.name", request.workstation.name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            workstations.Workstation,
+            metadata_type=workstations.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_workstation(
+        self,
+        request: Optional[Union[workstations.DeleteWorkstationRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Deletes the specified workstation.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_delete_workstation():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.DeleteWorkstationRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = await client.delete_workstation(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.DeleteWorkstationRequest, dict]]):
+                The request object. Request message for
+                DeleteWorkstation.
+            name (:class:`str`):
+                Required. Name of the workstation to
+                delete.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.workstations_v1beta.types.Workstation`
+                A single instance of a developer workstation with its
+                own persistent storage.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.DeleteWorkstationRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_workstation,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
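+        # Illustrative: from_gapic() pairs the raw longrunning Operation with
+        # the transport's operations client so the returned AsyncOperation can
+        # poll for completion; a hypothetical caller would then do:
+        #     operation = await client.delete_workstation(name=name)
+        #     result = await operation.result()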
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            workstations.Workstation,
+            metadata_type=workstations.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def start_workstation(
+        self,
+        request: Optional[Union[workstations.StartWorkstationRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Starts running a workstation so that users can
+        connect to it.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_start_workstation():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.StartWorkstationRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = await client.start_workstation(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.StartWorkstationRequest, dict]]):
+                The request object. Request message for StartWorkstation.
+            name (:class:`str`):
+                Required. Name of the workstation to
+                start.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.workstations_v1beta.types.Workstation`
+                A single instance of a developer workstation with its
+                own persistent storage.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.StartWorkstationRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.start_workstation,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            workstations.Workstation,
+            metadata_type=workstations.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def stop_workstation(
+        self,
+        request: Optional[Union[workstations.StopWorkstationRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Stops running a workstation, reducing costs.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import workstations_v1beta
+
+            async def sample_stop_workstation():
+                # Create a client
+                client = workstations_v1beta.WorkstationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = workstations_v1beta.StopWorkstationRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = await client.stop_workstation(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.workstations_v1beta.types.StopWorkstationRequest, dict]]):
+                The request object. Request message for StopWorkstation.
+            name (:class:`str`):
+                Required. Name of the workstation to
+                stop.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.workstations_v1beta.types.Workstation`
+                A single instance of a developer workstation with its
+                own persistent storage.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = workstations.StopWorkstationRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
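+        # Illustrative: the defaults below apply only while the caller leaves
+        # `retry` and `timeout` at gapic_v1.method.DEFAULT; e.g. a hypothetical
+        #     await client.stop_workstation(request=request, timeout=120.0)
+        # overrides the 60-second default for that call alone.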
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_workstation, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def generate_access_token( + self, + request: Optional[Union[workstations.GenerateAccessTokenRequest, dict]] = None, + *, + workstation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.GenerateAccessTokenResponse: + r"""Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + async def sample_generate_access_token(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = await client.generate_access_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.workstations_v1beta.types.GenerateAccessTokenRequest, dict]]): + The request object. Request message for + GenerateAccessToken. + workstation (:class:`str`): + Required. Name of the workstation for + which the access token should be + generated. + + This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.GenerateAccessTokenResponse: + Response message for + GenerateAccessToken. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workstations.GenerateAccessTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
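+        # Illustrative: the response's access_token is a short-lived bearer
+        # credential; a hypothetical caller might use it as
+        #     resp = await client.generate_access_token(workstation=ws_name)
+        #     headers = {"Authorization": "Bearer " + resp.access_token}
+        # when talking to the workstation's host.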
+ if workstation is not None: + request.workstation = workstation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_access_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation", request.workstation),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
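+        # Illustrative: unlike the workstations RPCs above, this request is a
+        # raw protobuf, so a dict such as {"name": "operations/op123"} (name
+        # hypothetical) is expanded below via keyword arguments into
+        # operations_pb2.GetOperationRequest(name="operations/op123").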
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
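+        # Illustrative: cancellation is best-effort. A hypothetical caller that
+        # needs confirmation can poll afterwards, e.g.
+        #     await client.cancel_operation(request={"name": op_name})
+        #     op = await client.get_operation(request={"name": op_name})
+        # and inspect op.error for a google.rpc.Code.CANCELLED status.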
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.CancelOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.cancel_operation,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def set_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the IAM access control policy on the specified resource.
+
+        Replaces any existing policy.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+                The request object. Request message for `SetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
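+        # Illustrative read-modify-write pattern (hypothetical resource and
+        # role; production code should also carry the policy etag):
+        #     policy = await client.get_iam_policy(request={"resource": res})
+        #     policy.bindings.add(
+        #         role="roles/workstations.user",
+        #         members=["user:alice@example.com"],
+        #     )
+        #     await client.set_iam_policy(
+        #         request={"resource": res, "policy": policy},
+        #     )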
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a resource.
+
+        Returns an empty policy if the resource exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a resource.
+
+        If the resource does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for ``TestIamPermissions`` method.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.test_iam_permissions,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        await self.transport.close()
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+__all__ = ("WorkstationsAsyncClient",)
diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/client.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/client.py
new file mode 100644
index 000000000000..1afe41098084
--- /dev/null
+++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/client.py
@@ -0,0 +1,3442 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workstations_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.workstations_v1beta.services.workstations import pagers +from google.cloud.workstations_v1beta.types import workstations + +from .transports.base import DEFAULT_CLIENT_INFO, WorkstationsTransport +from .transports.grpc import WorkstationsGrpcTransport +from .transports.grpc_asyncio import WorkstationsGrpcAsyncIOTransport +from .transports.rest import WorkstationsRestTransport + + +class WorkstationsClientMeta(type): + """Metaclass for the Workstations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[WorkstationsTransport]] + _transport_registry["grpc"] = WorkstationsGrpcTransport + _transport_registry["grpc_asyncio"] = WorkstationsGrpcAsyncIOTransport + _transport_registry["rest"] = WorkstationsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[WorkstationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
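+        # For example (illustrative, not generated code): with the registry
+        # populated above, ``get_transport_class()`` returns
+        # WorkstationsGrpcTransport (the first entry registered), while
+        # ``get_transport_class("rest")`` returns WorkstationsRestTransport.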
+        return next(iter(cls._transport_registry.values()))
+
+
+class WorkstationsClient(metaclass=WorkstationsClientMeta):
+    """Service for interacting with Cloud Workstations."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "workstations.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            WorkstationsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            WorkstationsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> WorkstationsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            WorkstationsTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+    @staticmethod
+    def workstation_path(
+        project: str,
+        location: str,
+        workstation_cluster: str,
+        workstation_config: str,
+        workstation: str,
+    ) -> str:
+        """Returns a fully-qualified workstation string."""
+        return "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}/workstations/{workstation}".format(
+            project=project,
+            location=location,
+            workstation_cluster=workstation_cluster,
+            workstation_config=workstation_config,
+            workstation=workstation,
+        )
+
+    @staticmethod
+    def parse_workstation_path(path: str) -> Dict[str, str]:
+        """Parses a workstation path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workstationClusters/(?P<workstation_cluster>.+?)/workstationConfigs/(?P<workstation_config>.+?)/workstations/(?P<workstation>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def workstation_cluster_path(
+        project: str,
+        location: str,
+        workstation_cluster: str,
+    ) -> str:
+        """Returns a fully-qualified workstation_cluster string."""
+        return "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}".format(
+            project=project,
+            location=location,
+            workstation_cluster=workstation_cluster,
+        )
+
+    @staticmethod
+    def parse_workstation_cluster_path(path: str) -> Dict[str, str]:
+        """Parses a workstation_cluster path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workstationClusters/(?P<workstation_cluster>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def workstation_config_path(
+        project: str,
+        location: str,
+        workstation_cluster: str,
+        workstation_config: str,
+    ) -> str:
+        """Returns a fully-qualified workstation_config string."""
+        return "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}".format(
+            project=project,
+            location=location,
+            workstation_cluster=workstation_cluster,
+            workstation_config=workstation_config,
+        )
+
+    @staticmethod
+    def parse_workstation_config_path(path: str) -> Dict[str, str]:
+        """Parses a workstation_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workstationClusters/(?P<workstation_cluster>.+?)/workstationConfigs/(?P<workstation_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
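+    # Illustrative usage (not part of the generated code): the path helpers
+    # build and parse full resource names and round-trip each other, e.g.
+    #
+    #     name = WorkstationsClient.workstation_cluster_path("p", "us-central1", "c")
+    #     # -> "projects/p/locations/us-central1/workstationClusters/c"
+    #     WorkstationsClient.parse_workstation_cluster_path(name)
+    #     # -> {"project": "p", "location": "us-central1", "workstation_cluster": "c"}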
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
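+        # Illustrative summary (not generated code) of the resolution below,
+        # assuming ``client_options.api_endpoint`` is not set:
+        #
+        #     GOOGLE_API_USE_MTLS_ENDPOINT="always"        -> DEFAULT_MTLS_ENDPOINT
+        #     GOOGLE_API_USE_MTLS_ENDPOINT="auto", cert    -> DEFAULT_MTLS_ENDPOINT
+        #     GOOGLE_API_USE_MTLS_ENDPOINT="auto", no cert -> DEFAULT_ENDPOINT
+        #     GOOGLE_API_USE_MTLS_ENDPOINT="never"         -> DEFAULT_ENDPOINT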
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, WorkstationsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the workstations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, WorkstationsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, WorkstationsTransport): + # transport is a WorkstationsTransport instance. 
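+            # Illustrative note (not generated code): a caller supplying a
+            # pre-built transport must attach credentials to the transport
+            # itself, e.g.
+            #
+            #     transport = WorkstationsGrpcTransport(credentials=creds)
+            #     client = WorkstationsClient(transport=transport)
+            #
+            # where ``creds`` is a placeholder; passing ``credentials=`` along
+            # with a transport instance raises the ValueError below.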
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_workstation_cluster( + self, + request: Optional[ + Union[workstations.GetWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationCluster: + r"""Returns the requested workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.GetWorkstationClusterRequest, dict]): + The request object. Request message for + GetWorkstationCluster. + name (str): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.WorkstationCluster: + A grouping of workstation + configurations and the associated + workstations in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GetWorkstationClusterRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.GetWorkstationClusterRequest): + request = workstations.GetWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workstation_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workstation_clusters( + self, + request: Optional[ + Union[workstations.ListWorkstationClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationClustersPager: + r"""Returns all workstation clusters in the specified + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_list_workstation_clusters(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.ListWorkstationClustersRequest, dict]): + The request object. Request message for + ListWorkstationClusters. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationClustersPager: + Response message for + ListWorkstationClusters. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListWorkstationClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListWorkstationClustersRequest): + request = workstations.ListWorkstationClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_workstation_clusters + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkstationClustersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_workstation_cluster( + self, + request: Optional[ + Union[workstations.CreateWorkstationClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + workstation_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.CreateWorkstationClusterRequest, dict]): + The request object. Message for creating a + CreateWorkstationCluster. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster (google.cloud.workstations_v1beta.types.WorkstationCluster): + Required. Workstation cluster to + create. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_cluster_id (str): + Required. ID to use for the + workstation cluster. 
+ + This corresponds to the ``workstation_cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, workstation_cluster, workstation_cluster_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.CreateWorkstationClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.CreateWorkstationClusterRequest): + request = workstations.CreateWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if workstation_cluster_id is not None: + request.workstation_cluster_id = workstation_cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_workstation_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_workstation_cluster( + self, + request: Optional[ + Union[workstations.UpdateWorkstationClusterRequest, dict] + ] = None, + *, + workstation_cluster: Optional[workstations.WorkstationCluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_update_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationClusterRequest( + ) + + # Make the request + operation = client.update_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.UpdateWorkstationClusterRequest, dict]): + The request object. Request message for + UpdateWorkstationCluster. + workstation_cluster (google.cloud.workstations_v1beta.types.WorkstationCluster): + Required. Workstation cluster to + update. + + This corresponds to the ``workstation_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask that specifies which + fields in the workstation cluster should + be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation_cluster, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.UpdateWorkstationClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.UpdateWorkstationClusterRequest): + request = workstations.UpdateWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation_cluster is not None: + request.workstation_cluster = workstation_cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_workstation_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation_cluster.name", request.workstation_cluster.name),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_workstation_cluster( + self, + request: Optional[ + Union[workstations.DeleteWorkstationClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes the specified workstation cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.DeleteWorkstationClusterRequest, dict]): + The request object. Message for deleting a workstation + cluster. + name (str): + Required. Name of the workstation + cluster to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationCluster` A grouping of workstation configurations and the associated workstations + in that region. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.DeleteWorkstationClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.DeleteWorkstationClusterRequest): + request = workstations.DeleteWorkstationClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_workstation_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationCluster, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_workstation_config( + self, + request: Optional[Union[workstations.GetWorkstationConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationConfig: + r"""Returns the requested workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_get_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.GetWorkstationConfigRequest, dict]): + The request object. Request message for + GetWorkstationConfig. + name (str): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.WorkstationConfig: + A set of configuration options + describing how a workstation will be + run. Workstation configurations are + intended to be shared across multiple + workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GetWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
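+        # For example (illustrative, not generated code), these two calls are
+        # equivalent, while mixing ``request=`` with ``name=`` raises above
+        # (``config_name`` is a placeholder):
+        #
+        #     client.get_workstation_config(name=config_name)
+        #     client.get_workstation_config(
+        #         request=workstations.GetWorkstationConfigRequest(name=config_name)
+        #     )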
+ if not isinstance(request, workstations.GetWorkstationConfigRequest): + request = workstations.GetWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workstation_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workstation_configs( + self, + request: Optional[ + Union[workstations.ListWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationConfigsPager: + r"""Returns all workstation configurations in the + specified cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_list_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.ListWorkstationConfigsRequest, dict]): + The request object. Request message for + ListWorkstationConfigs. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationConfigsPager: + Response message for + ListWorkstationConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListWorkstationConfigsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListWorkstationConfigsRequest): + request = workstations.ListWorkstationConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workstation_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkstationConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_usable_workstation_configs( + self, + request: Optional[ + Union[workstations.ListUsableWorkstationConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableWorkstationConfigsPager: + r"""Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_list_usable_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListUsableWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsRequest, dict]): + The request object. Request message for + ListUsableWorkstationConfigs. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationConfigsPager: + Response message for + ListUsableWorkstationConfigs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
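+        # Illustrative note (not generated code): the pager returned by this
+        # method fetches further pages transparently, so a caller can iterate
+        # directly, e.g.
+        #
+        #     for config in client.list_usable_workstation_configs(parent=parent):
+        #         print(config.name)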
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListUsableWorkstationConfigsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListUsableWorkstationConfigsRequest): + request = workstations.ListUsableWorkstationConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_usable_workstation_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUsableWorkstationConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_workstation_config( + self, + request: Optional[ + Union[workstations.CreateWorkstationConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + workstation_config: Optional[workstations.WorkstationConfig] = None, + workstation_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_create_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationConfigRequest( + parent="parent_value", + workstation_config_id="workstation_config_id_value", + ) + + # Make the request + operation = client.create_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.CreateWorkstationConfigRequest, dict]): + The request object. Message for creating a + CreateWorkstationConfig. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ workstation_config (google.cloud.workstations_v1beta.types.WorkstationConfig): + Required. Config to create. + This corresponds to the ``workstation_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_config_id (str): + Required. ID to use for the + workstation configuration. + + This corresponds to the ``workstation_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation_config, workstation_config_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.CreateWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.CreateWorkstationConfigRequest): + request = workstations.CreateWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation_config is not None: + request.workstation_config = workstation_config + if workstation_config_id is not None: + request.workstation_config_id = workstation_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_workstation_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_workstation_config( + self, + request: Optional[ + Union[workstations.UpdateWorkstationConfigRequest, dict] + ] = None, + *, + workstation_config: Optional[workstations.WorkstationConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing workstation configuration. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_update_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationConfigRequest( + ) + + # Make the request + operation = client.update_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.UpdateWorkstationConfigRequest, dict]): + The request object. Request message for + UpdateWorkstationConfig. + workstation_config (google.cloud.workstations_v1beta.types.WorkstationConfig): + Required. Config to update. + This corresponds to the ``workstation_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask specifying which + fields in the workstation configuration + should be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.UpdateWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.UpdateWorkstationConfigRequest): + request = workstations.UpdateWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation_config is not None: + request.workstation_config = workstation_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_workstation_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
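+        # Illustrative note (not generated code): the routing header built
+        # below is transmitted as ``x-goog-request-params``, roughly
+        #
+        #     x-goog-request-params: workstation_config.name=<url-encoded name>
+        #
+        # which lets the service route the call to the right regional backend.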
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation_config.name", request.workstation_config.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_workstation_config( + self, + request: Optional[ + Union[workstations.DeleteWorkstationConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes the specified workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_delete_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.DeleteWorkstationConfigRequest, dict]): + The request object. Message for deleting a workstation + configuration. + name (str): + Required. Name of the workstation + configuration to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.workstations_v1beta.types.WorkstationConfig` A set of configuration options describing how a workstation will be run. + Workstation configurations are intended to be shared + across multiple workstations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.DeleteWorkstationConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, workstations.DeleteWorkstationConfigRequest): + request = workstations.DeleteWorkstationConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_workstation_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.WorkstationConfig, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_workstation( + self, + request: Optional[Union[workstations.GetWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.Workstation: + r"""Returns the requested workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_get_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.GetWorkstationRequest, dict]): + The request object. Request message for GetWorkstation. + name (str): + Required. Name of the requested + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.Workstation: + A single instance of a developer + workstation with its own persistent + storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GetWorkstationRequest. 
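``get_workstation`` takes the workstation's full resource name. A sketch using the generated ``workstation_path`` helper; the argument order is assumed from the resource pattern, and all IDs are placeholders:

.. code-block:: python

    from google.cloud import workstations_v1beta

    client = workstations_v1beta.WorkstationsClient()
    # Builds projects/.../locations/.../workstationClusters/.../
    # workstationConfigs/.../workstations/... from the individual IDs.
    name = client.workstation_path(
        "my-project", "us-central1", "my-cluster", "my-config", "my-workstation"
    )
    workstation = client.get_workstation(name=name)
    print(workstation.display_name, workstation.state)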
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.GetWorkstationRequest): + request = workstations.GetWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workstations( + self, + request: Optional[Union[workstations.ListWorkstationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkstationsPager: + r"""Returns all Workstations using the specified + workstation configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_list_workstations(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.ListWorkstationsRequest, dict]): + The request object. Request message for ListWorkstations. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationsPager: + Response message for + ListWorkstations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListWorkstationsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListWorkstationsRequest): + request = workstations.ListWorkstationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workstations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkstationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_usable_workstations( + self, + request: Optional[ + Union[workstations.ListUsableWorkstationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableWorkstationsPager: + r"""Returns all workstations using the specified + workstation configuration on which the caller has the + "workstations.workstations.use" permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_list_usable_workstations(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListUsableWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.ListUsableWorkstationsRequest, dict]): + The request object. Request message for + ListUsableWorkstations. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationsPager: + Response message for + ListUsableWorkstations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
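Both list methods return a pager rather than a raw response; iterating the pager lazily issues follow-up requests whenever ``next_page_token`` is set. A short sketch, with a placeholder config name as the parent:

.. code-block:: python

    from google.cloud import workstations_v1beta

    client = workstations_v1beta.WorkstationsClient()
    parent = (
        "projects/my-project/locations/us-central1/"
        "workstationClusters/my-cluster/workstationConfigs/my-config"
    )

    # Item-level iteration: extra ListWorkstations calls happen transparently.
    for workstation in client.list_workstations(parent=parent):
        print(workstation.name)

    # Page-level iteration, if you need each raw response.
    for page in client.list_workstations(parent=parent).pages:
        print(len(page.workstations), page.next_page_token)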
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.ListUsableWorkstationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.ListUsableWorkstationsRequest): + request = workstations.ListUsableWorkstationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_usable_workstations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUsableWorkstationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_workstation( + self, + request: Optional[Union[workstations.CreateWorkstationRequest, dict]] = None, + *, + parent: Optional[str] = None, + workstation: Optional[workstations.Workstation] = None, + workstation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_create_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationRequest( + parent="parent_value", + workstation_id="workstation_id_value", + ) + + # Make the request + operation = client.create_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.CreateWorkstationRequest, dict]): + The request object. Request message for + CreateWorkstation. + parent (str): + Required. Parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation (google.cloud.workstations_v1beta.types.Workstation): + Required. Workstation to create.
+ This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workstation_id (str): + Required. ID to use for the + workstation. + + This corresponds to the ``workstation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1beta.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workstation, workstation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.CreateWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.CreateWorkstationRequest): + request = workstations.CreateWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workstation is not None: + request.workstation = workstation + if workstation_id is not None: + request.workstation_id = workstation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_workstation( + self, + request: Optional[Union[workstations.UpdateWorkstationRequest, dict]] = None, + *, + workstation: Optional[workstations.Workstation] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
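The same create call with flattened arguments instead of a request object; ``operation.metadata`` (typed as ``OperationMetadata`` via ``from_gapic`` above) is readable before the future resolves. A sketch with placeholder names throughout:

.. code-block:: python

    from google.cloud import workstations_v1beta

    client = workstations_v1beta.WorkstationsClient()
    operation = client.create_workstation(
        parent=(
            "projects/my-project/locations/us-central1/"
            "workstationClusters/my-cluster/workstationConfigs/my-config"
        ),
        workstation=workstations_v1beta.Workstation(display_name="Dev box"),
        workstation_id="my-workstation",
    )

    # Progress metadata is usually available before completion.
    if operation.metadata is not None:
        print(operation.metadata.verb, operation.metadata.target)

    workstation = operation.result(timeout=300)  # seconds
    print(workstation.name)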
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_update_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationRequest( + ) + + # Make the request + operation = client.update_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.UpdateWorkstationRequest, dict]): + The request object. Request message for + UpdateWorkstation. + workstation (google.cloud.workstations_v1beta.types.Workstation): + Required. Workstation to update. + This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask specifying which + fields in the workstation + should be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1beta.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workstation, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.UpdateWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.UpdateWorkstationRequest): + request = workstations.UpdateWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation is not None: + request.workstation = workstation + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation.name", request.workstation.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future.
+ response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_workstation( + self, + request: Optional[Union[workstations.DeleteWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes the specified workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_delete_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.DeleteWorkstationRequest, dict]): + The request object. Request message for + DeleteWorkstation. + name (str): + Required. Name of the workstation to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1beta.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.DeleteWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.DeleteWorkstationRequest): + request = workstations.DeleteWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def start_workstation( + self, + request: Optional[Union[workstations.StartWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts running a workstation so that users can + connect to it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_start_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.StartWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.start_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.StartWorkstationRequest, dict]): + The request object. Request message for StartWorkstation. + name (str): + Required. Name of the workstation to + start. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1beta.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.StartWorkstationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.StartWorkstationRequest): + request = workstations.StartWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def stop_workstation( + self, + request: Optional[Union[workstations.StopWorkstationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops running a workstation, reducing costs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_stop_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.StopWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.stop_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.StopWorkstationRequest, dict]): + The request object. Request message for StopWorkstation. + name (str): + Required. Name of the workstation to + stop. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workstations_v1beta.types.Workstation` + A single instance of a developer workstation with its + own persistent storage. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.StopWorkstationRequest. 
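``start_workstation`` and ``stop_workstation`` are symmetric long-running operations over the same resource, so a stop/start cycle is just two ``result()`` waits. A sketch with a placeholder name:

.. code-block:: python

    from google.cloud import workstations_v1beta

    client = workstations_v1beta.WorkstationsClient()
    name = (
        "projects/my-project/locations/us-central1/"
        "workstationClusters/my-cluster/workstationConfigs/my-config/"
        "workstations/my-workstation"
    )

    running = client.start_workstation(name=name).result()
    print(running.state)  # STATE_RUNNING once the operation completes

    # ... connect and do work ...

    stopped = client.stop_workstation(name=name).result()
    print(stopped.state)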
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.StopWorkstationRequest): + request = workstations.StopWorkstationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_workstation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workstations.Workstation, + metadata_type=workstations.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_access_token( + self, + request: Optional[Union[workstations.GenerateAccessTokenRequest, dict]] = None, + *, + workstation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.GenerateAccessTokenResponse: + r"""Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import workstations_v1beta + + def sample_generate_access_token(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = client.generate_access_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.workstations_v1beta.types.GenerateAccessTokenRequest, dict]): + The request object. Request message for + GenerateAccessToken. + workstation (str): + Required. Name of the workstation for + which the access token should be + generated. + + This corresponds to the ``workstation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workstations_v1beta.types.GenerateAccessTokenResponse: + Response message for + GenerateAccessToken. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
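The ``GenerateAccessTokenResponse`` carries the credential and its expiry in the ``access_token`` and ``expire_time`` fields; a bearer header on requests to the workstation host is one plausible way to use it. A sketch with a placeholder name:

.. code-block:: python

    from google.cloud import workstations_v1beta

    client = workstations_v1beta.WorkstationsClient()
    response = client.generate_access_token(
        workstation=(
            "projects/my-project/locations/us-central1/"
            "workstationClusters/my-cluster/workstationConfigs/my-config/"
            "workstations/my-workstation"
        )
    )
    # The token is short-lived; expire_time marks when it stops being accepted.
    headers = {"Authorization": f"Bearer {response.access_token}"}
    print(response.expire_time, headers["Authorization"][:24], "...")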
+ has_flattened_params = any([workstation]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workstations.GenerateAccessTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workstations.GenerateAccessTokenRequest): + request = workstations.GenerateAccessTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workstation is not None: + request.workstation = workstation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_access_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workstation", request.workstation),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "WorkstationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
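Per the ``__exit__`` warning above, the context-manager form closes the transport on exit, which is only safe when the transport is not shared with another client. A short sketch:

.. code-block:: python

    from google.cloud import workstations_v1beta

    with workstations_v1beta.WorkstationsClient() as client:
        for workstation in client.list_workstations(
            parent=(
                "projects/my-project/locations/us-central1/"
                "workstationClusters/my-cluster/workstationConfigs/my-config"
            )
        ):
            print(workstation.name)
    # The transport is closed here; further RPCs on `client` will fail.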
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
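The operations mixins accept either the raw ``operations_pb2`` request types or plain dicts, which the methods above expand via keyword arguments. A sketch for inspecting and cancelling a long-running operation by name (the name is a placeholder):

.. code-block:: python

    from google.cloud import workstations_v1beta

    client = workstations_v1beta.WorkstationsClient()
    op_name = "projects/my-project/locations/us-central1/operations/operation-123"

    op = client.get_operation({"name": op_name})
    if not op.done:
        # Best-effort cancellation; success is not guaranteed.
        client.cancel_operation({"name": op_name})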
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
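A read-modify-write sketch across the three IAM methods, assuming a placeholder resource name and the ``roles/workstations.user`` role; the ``workstations.workstations.use`` permission is the one referenced by ``list_usable_workstations`` above:

.. code-block:: python

    from google.cloud import workstations_v1beta
    from google.iam.v1 import policy_pb2

    client = workstations_v1beta.WorkstationsClient()
    resource = (
        "projects/my-project/locations/us-central1/"
        "workstationClusters/my-cluster/workstationConfigs/my-config/"
        "workstations/my-workstation"
    )

    # Fetch the current policy, add a binding, write it back.
    policy = client.get_iam_policy({"resource": resource})
    policy.bindings.append(
        policy_pb2.Binding(
            role="roles/workstations.user",
            members=["user:eve@example.com"],
        )
    )
    client.set_iam_policy({"resource": resource, "policy": policy})

    # Check which of the given permissions the caller holds.
    response = client.test_iam_permissions(
        {"resource": resource, "permissions": ["workstations.workstations.use"]}
    )
    print(response.permissions)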
+ rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("WorkstationsClient",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/pagers.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/pagers.py new file mode 100644 index 000000000000..6562f9cfad37 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/pagers.py @@ -0,0 +1,673 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.workstations_v1beta.types import workstations + + +class ListWorkstationClustersPager: + """A pager for iterating through ``list_workstation_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListWorkstationClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstation_clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkstationClusters`` requests and continue to iterate + through the ``workstation_clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListWorkstationClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListWorkstationClustersResponse], + request: workstations.ListWorkstationClustersRequest, + response: workstations.ListWorkstationClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListWorkstationClustersRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListWorkstationClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = workstations.ListWorkstationClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListWorkstationClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.WorkstationCluster]: + for page in self.pages: + yield from page.workstation_clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationClustersAsyncPager: + """A pager for iterating through ``list_workstation_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListWorkstationClustersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstation_clusters`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkstationClusters`` requests and continue to iterate + through the ``workstation_clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListWorkstationClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListWorkstationClustersResponse]], + request: workstations.ListWorkstationClustersRequest, + response: workstations.ListWorkstationClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListWorkstationClustersRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListWorkstationClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[workstations.ListWorkstationClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.WorkstationCluster]: + async def async_generator(): + async for page in self.pages: + for response in page.workstation_clusters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationConfigsPager: + """A pager for iterating through ``list_workstation_configs`` requests. 
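+
+    Pages can also be consumed one response at a time (a sketch; ``pager``
+    is what ``WorkstationsClient.list_workstation_configs`` returns)::
+
+        for page in pager.pages:
+            for config in page.workstation_configs:
+                print(config.name)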
+ + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListWorkstationConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListWorkstationConfigsResponse], + request: workstations.ListWorkstationConfigsRequest, + response: workstations.ListWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListWorkstationConfigsRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.WorkstationConfig]: + for page in self.pages: + yield from page.workstation_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationConfigsAsyncPager: + """A pager for iterating through ``list_workstation_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListWorkstationConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListWorkstationConfigsResponse]], + request: workstations.ListWorkstationConfigsRequest, + response: workstations.ListWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListWorkstationConfigsRequest): + The initial request object. 
+ response (google.cloud.workstations_v1beta.types.ListWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[workstations.ListWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.WorkstationConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.workstation_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationConfigsPager: + """A pager for iterating through ``list_usable_workstation_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsableWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListUsableWorkstationConfigsResponse], + request: workstations.ListUsableWorkstationConfigsRequest, + response: workstations.ListUsableWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
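+
+        For example (a sketch; the project, location, and cluster names
+        are hypothetical)::
+
+            pager = client.list_usable_workstation_configs(
+                parent="projects/my-project/locations/us-central1/workstationClusters/my-cluster"
+            )
+            for config in pager:
+                print(config.name)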
+ """ + self._method = method + self._request = workstations.ListUsableWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListUsableWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.WorkstationConfig]: + for page in self.pages: + yield from page.workstation_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationConfigsAsyncPager: + """A pager for iterating through ``list_usable_workstation_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstation_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUsableWorkstationConfigs`` requests and continue to iterate + through the ``workstation_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[workstations.ListUsableWorkstationConfigsResponse] + ], + request: workstations.ListUsableWorkstationConfigsRequest, + response: workstations.ListUsableWorkstationConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListUsableWorkstationConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[workstations.ListUsableWorkstationConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.WorkstationConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.workstation_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationsPager: + """A pager for iterating through ``list_workstations`` requests. 
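+
+    Because attribute access is proxied to the most recent response, a
+    sketch like the following works (``pager`` is what
+    ``WorkstationsClient.list_workstations`` returns)::
+
+        for workstation in pager:       # fetches further pages on demand
+            print(workstation.name)
+        print(pager.next_page_token)    # proxied from the latest response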
+ + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListWorkstationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListWorkstationsResponse], + request: workstations.ListWorkstationsRequest, + response: workstations.ListWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListWorkstationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.Workstation]: + for page in self.pages: + yield from page.workstations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkstationsAsyncPager: + """A pager for iterating through ``list_workstations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListWorkstationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListWorkstationsResponse]], + request: workstations.ListWorkstationsRequest, + response: workstations.ListWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListWorkstationsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[workstations.ListWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.Workstation]: + async def async_generator(): + async for page in self.pages: + for response in page.workstations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationsPager: + """A pager for iterating through ``list_usable_workstations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsableWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workstations.ListUsableWorkstationsResponse], + request: workstations.ListUsableWorkstationsRequest, + response: workstations.ListUsableWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListUsableWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListUsableWorkstationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListUsableWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workstations.ListUsableWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workstations.Workstation]: + for page in self.pages: + yield from page.workstations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableWorkstationsAsyncPager: + """A pager for iterating through ``list_usable_workstations`` requests. 
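+
+    A minimal async sketch (assuming ``client`` is the async Workstations
+    client and ``parent`` is a valid workstation config path)::
+
+        pager = await client.list_usable_workstations(parent=parent)
+        async for workstation in pager:
+            print(workstation.name)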
+ + This class thinly wraps an initial + :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workstations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUsableWorkstations`` requests and continue to iterate + through the ``workstations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workstations_v1beta.types.ListUsableWorkstationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[workstations.ListUsableWorkstationsResponse]], + request: workstations.ListUsableWorkstationsRequest, + response: workstations.ListUsableWorkstationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workstations_v1beta.types.ListUsableWorkstationsRequest): + The initial request object. + response (google.cloud.workstations_v1beta.types.ListUsableWorkstationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workstations.ListUsableWorkstationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[workstations.ListUsableWorkstationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workstations.Workstation]: + async def async_generator(): + async for page in self.pages: + for response in page.workstations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/__init__.py new file mode 100644 index 000000000000..9facbb0989aa --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import WorkstationsTransport +from .grpc import WorkstationsGrpcTransport +from .grpc_asyncio import WorkstationsGrpcAsyncIOTransport +from .rest import WorkstationsRestInterceptor, WorkstationsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[WorkstationsTransport]] +_transport_registry["grpc"] = WorkstationsGrpcTransport +_transport_registry["grpc_asyncio"] = WorkstationsGrpcAsyncIOTransport +_transport_registry["rest"] = WorkstationsRestTransport + +__all__ = ( + "WorkstationsTransport", + "WorkstationsGrpcTransport", + "WorkstationsGrpcAsyncIOTransport", + "WorkstationsRestTransport", + "WorkstationsRestInterceptor", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/base.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/base.py new file mode 100644 index 000000000000..16ee3c24aef7 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/base.py @@ -0,0 +1,596 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workstations_v1beta import gapic_version as package_version +from google.cloud.workstations_v1beta.types import workstations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class WorkstationsTransport(abc.ABC): + """Abstract transport class for Workstations.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "workstations.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
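+        # The read-style RPCs below (gets and lists) are wrapped with an
+        # exponential-backoff retry: the first attempt is retried after
+        # ~1.0s, the delay grows by a factor of 1.3 up to a 10.0s cap,
+        # only ServiceUnavailable errors are retried, and the whole call
+        # gives up after a 60.0s deadline. Mutations (create, update,
+        # delete, start, stop) get only the 60.0s default timeout.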
+ self._wrapped_methods = { + self.get_workstation_cluster: gapic_v1.method.wrap_method( + self.get_workstation_cluster, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_workstation_clusters: gapic_v1.method.wrap_method( + self.list_workstation_clusters, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_workstation_cluster: gapic_v1.method.wrap_method( + self.create_workstation_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.update_workstation_cluster: gapic_v1.method.wrap_method( + self.update_workstation_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_workstation_cluster: gapic_v1.method.wrap_method( + self.delete_workstation_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.get_workstation_config: gapic_v1.method.wrap_method( + self.get_workstation_config, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_workstation_configs: gapic_v1.method.wrap_method( + self.list_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_usable_workstation_configs: gapic_v1.method.wrap_method( + self.list_usable_workstation_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_workstation_config: gapic_v1.method.wrap_method( + self.create_workstation_config, + default_timeout=60.0, + client_info=client_info, + ), + self.update_workstation_config: gapic_v1.method.wrap_method( + self.update_workstation_config, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_workstation_config: gapic_v1.method.wrap_method( + self.delete_workstation_config, + default_timeout=60.0, + client_info=client_info, + ), + self.get_workstation: gapic_v1.method.wrap_method( + self.get_workstation, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_workstations: gapic_v1.method.wrap_method( + self.list_workstations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_usable_workstations: gapic_v1.method.wrap_method( + self.list_usable_workstations, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), 
+ default_timeout=60.0, + client_info=client_info, + ), + self.create_workstation: gapic_v1.method.wrap_method( + self.create_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.update_workstation: gapic_v1.method.wrap_method( + self.update_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_workstation: gapic_v1.method.wrap_method( + self.delete_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.start_workstation: gapic_v1.method.wrap_method( + self.start_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.stop_workstation: gapic_v1.method.wrap_method( + self.stop_workstation, + default_timeout=60.0, + client_info=client_info, + ), + self.generate_access_token: gapic_v1.method.wrap_method( + self.generate_access_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_workstation_cluster( + self, + ) -> Callable[ + [workstations.GetWorkstationClusterRequest], + Union[ + workstations.WorkstationCluster, Awaitable[workstations.WorkstationCluster] + ], + ]: + raise NotImplementedError() + + @property + def list_workstation_clusters( + self, + ) -> Callable[ + [workstations.ListWorkstationClustersRequest], + Union[ + workstations.ListWorkstationClustersResponse, + Awaitable[workstations.ListWorkstationClustersResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_workstation_cluster( + self, + ) -> Callable[ + [workstations.CreateWorkstationClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], + Union[ + workstations.WorkstationConfig, Awaitable[workstations.WorkstationConfig] + ], + ]: + raise NotImplementedError() + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + Union[ + workstations.ListWorkstationConfigsResponse, + Awaitable[workstations.ListWorkstationConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + Union[ + workstations.ListUsableWorkstationConfigsResponse, + Awaitable[workstations.ListUsableWorkstationConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_workstation_config( + self, + ) -> 
Callable[ + [workstations.CreateWorkstationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_workstation( + self, + ) -> Callable[ + [workstations.GetWorkstationRequest], + Union[workstations.Workstation, Awaitable[workstations.Workstation]], + ]: + raise NotImplementedError() + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], + Union[ + workstations.ListWorkstationsResponse, + Awaitable[workstations.ListWorkstationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + Union[ + workstations.ListUsableWorkstationsResponse, + Awaitable[workstations.ListUsableWorkstationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_workstation( + self, + ) -> Callable[ + [workstations.CreateWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workstation( + self, + ) -> Callable[ + [workstations.UpdateWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_workstation( + self, + ) -> Callable[ + [workstations.DeleteWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def start_workstation( + self, + ) -> Callable[ + [workstations.StartWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def stop_workstation( + self, + ) -> Callable[ + [workstations.StopWorkstationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + Union[ + workstations.GenerateAccessTokenResponse, + Awaitable[workstations.GenerateAccessTokenResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, 
Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("WorkstationsTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/grpc.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/grpc.py new file mode 100644 index 000000000000..c98496c3c7e0 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/grpc.py @@ -0,0 +1,974 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.workstations_v1beta.types import workstations + +from .base import DEFAULT_CLIENT_INFO, WorkstationsTransport + + +class WorkstationsGrpcTransport(WorkstationsTransport): + """gRPC backend transport for Workstations. + + Service for interacting with Cloud Workstations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
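+
+    A minimal construction sketch (assumes Application Default Credentials
+    are available; wiring the transport into ``WorkstationsClient`` is the
+    usual next step)::
+
+        transport = WorkstationsGrpcTransport()
+        client = WorkstationsClient(transport=transport)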
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def get_workstation_cluster( + self, + ) -> Callable[ + [workstations.GetWorkstationClusterRequest], workstations.WorkstationCluster + ]: + r"""Return a callable for the get workstation cluster method over gRPC. + + Returns the requested workstation cluster. + + Returns: + Callable[[~.GetWorkstationClusterRequest], + ~.WorkstationCluster]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workstation_cluster" not in self._stubs: + self._stubs["get_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/GetWorkstationCluster", + request_serializer=workstations.GetWorkstationClusterRequest.serialize, + response_deserializer=workstations.WorkstationCluster.deserialize, + ) + return self._stubs["get_workstation_cluster"] + + @property + def list_workstation_clusters( + self, + ) -> Callable[ + [workstations.ListWorkstationClustersRequest], + workstations.ListWorkstationClustersResponse, + ]: + r"""Return a callable for the list workstation clusters method over gRPC. + + Returns all workstation clusters in the specified + location. + + Returns: + Callable[[~.ListWorkstationClustersRequest], + ~.ListWorkstationClustersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstation_clusters" not in self._stubs: + self._stubs["list_workstation_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListWorkstationClusters", + request_serializer=workstations.ListWorkstationClustersRequest.serialize, + response_deserializer=workstations.ListWorkstationClustersResponse.deserialize, + ) + return self._stubs["list_workstation_clusters"] + + @property + def create_workstation_cluster( + self, + ) -> Callable[ + [workstations.CreateWorkstationClusterRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create workstation cluster method over gRPC. + + Creates a new workstation cluster. 
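+
+        The callable returns the raw ``operations_pb2.Operation`` proto;
+        it is the client layer above this transport, not the transport
+        itself, that wraps it in a polling future.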
+ + Returns: + Callable[[~.CreateWorkstationClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workstation_cluster" not in self._stubs: + self._stubs["create_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/CreateWorkstationCluster", + request_serializer=workstations.CreateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_cluster"] + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update workstation cluster method over gRPC. + + Updates an existing workstation cluster. + + Returns: + Callable[[~.UpdateWorkstationClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_cluster" not in self._stubs: + self._stubs["update_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/UpdateWorkstationCluster", + request_serializer=workstations.UpdateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_cluster"] + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete workstation cluster method over gRPC. + + Deletes the specified workstation cluster. + + Returns: + Callable[[~.DeleteWorkstationClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_cluster" not in self._stubs: + self._stubs["delete_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/DeleteWorkstationCluster", + request_serializer=workstations.DeleteWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_cluster"] + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], workstations.WorkstationConfig + ]: + r"""Return a callable for the get workstation config method over gRPC. + + Returns the requested workstation configuration. + + Returns: + Callable[[~.GetWorkstationConfigRequest], + ~.WorkstationConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
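+        # The stub is created lazily on first access and cached in
+        # ``self._stubs``, so the serializer/deserializer pair is
+        # registered with the channel only once per transport instance.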
+ if "get_workstation_config" not in self._stubs: + self._stubs["get_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/GetWorkstationConfig", + request_serializer=workstations.GetWorkstationConfigRequest.serialize, + response_deserializer=workstations.WorkstationConfig.deserialize, + ) + return self._stubs["get_workstation_config"] + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + workstations.ListWorkstationConfigsResponse, + ]: + r"""Return a callable for the list workstation configs method over gRPC. + + Returns all workstation configurations in the + specified cluster. + + Returns: + Callable[[~.ListWorkstationConfigsRequest], + ~.ListWorkstationConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstation_configs" not in self._stubs: + self._stubs["list_workstation_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListWorkstationConfigs", + request_serializer=workstations.ListWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_workstation_configs"] + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + workstations.ListUsableWorkstationConfigsResponse, + ]: + r"""Return a callable for the list usable workstation + configs method over gRPC. + + Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + Returns: + Callable[[~.ListUsableWorkstationConfigsRequest], + ~.ListUsableWorkstationConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstation_configs" not in self._stubs: + self._stubs[ + "list_usable_workstation_configs" + ] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListUsableWorkstationConfigs", + request_serializer=workstations.ListUsableWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_usable_workstation_configs"] + + @property + def create_workstation_config( + self, + ) -> Callable[ + [workstations.CreateWorkstationConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create workstation config method over gRPC. + + Creates a new workstation configuration. + + Returns: + Callable[[~.CreateWorkstationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workstation_config" not in self._stubs: + self._stubs["create_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/CreateWorkstationConfig", + request_serializer=workstations.CreateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_config"] + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update workstation config method over gRPC. + + Updates an existing workstation configuration. + + Returns: + Callable[[~.UpdateWorkstationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_config" not in self._stubs: + self._stubs["update_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/UpdateWorkstationConfig", + request_serializer=workstations.UpdateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_config"] + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete workstation config method over gRPC. + + Deletes the specified workstation configuration. + + Returns: + Callable[[~.DeleteWorkstationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_config" not in self._stubs: + self._stubs["delete_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/DeleteWorkstationConfig", + request_serializer=workstations.DeleteWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_config"] + + @property + def get_workstation( + self, + ) -> Callable[[workstations.GetWorkstationRequest], workstations.Workstation]: + r"""Return a callable for the get workstation method over gRPC. + + Returns the requested workstation. + + Returns: + Callable[[~.GetWorkstationRequest], + ~.Workstation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_workstation" not in self._stubs: + self._stubs["get_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/GetWorkstation", + request_serializer=workstations.GetWorkstationRequest.serialize, + response_deserializer=workstations.Workstation.deserialize, + ) + return self._stubs["get_workstation"] + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], workstations.ListWorkstationsResponse + ]: + r"""Return a callable for the list workstations method over gRPC. + + Returns all Workstations using the specified + workstation configuration. + + Returns: + Callable[[~.ListWorkstationsRequest], + ~.ListWorkstationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstations" not in self._stubs: + self._stubs["list_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListWorkstations", + request_serializer=workstations.ListWorkstationsRequest.serialize, + response_deserializer=workstations.ListWorkstationsResponse.deserialize, + ) + return self._stubs["list_workstations"] + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + workstations.ListUsableWorkstationsResponse, + ]: + r"""Return a callable for the list usable workstations method over gRPC. + + Returns all workstations using the specified + workstation configuration on which the caller has the + "workstations.workstations.use" permission. + + Returns: + Callable[[~.ListUsableWorkstationsRequest], + ~.ListUsableWorkstationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstations" not in self._stubs: + self._stubs["list_usable_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListUsableWorkstations", + request_serializer=workstations.ListUsableWorkstationsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationsResponse.deserialize, + ) + return self._stubs["list_usable_workstations"] + + @property + def create_workstation( + self, + ) -> Callable[[workstations.CreateWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the create workstation method over gRPC. + + Creates a new workstation. + + Returns: + Callable[[~.CreateWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
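[Editor's note] The mutating stubs above return raw `google.longrunning.Operation` messages; the generated client wraps them in `google.api_core.operation.Operation`, so callers can block on `result()`. A sketch, assuming the standard flattened parameters (`parent`, `workstation`, `workstation_id`) and placeholder resource names:

```python
# Hypothetical usage sketch: create_workstation returns a long-running
# operation; result() blocks until the server finishes and returns the
# resulting Workstation. Resource names are placeholders.
from google.cloud import workstations_v1beta

client = workstations_v1beta.WorkstationsClient()

parent = (
    "projects/my-project/locations/us-central1"
    "/workstationClusters/my-cluster/workstationConfigs/my-config"
)
operation = client.create_workstation(
    parent=parent,
    workstation=workstations_v1beta.Workstation(),
    workstation_id="my-workstation",
)
workstation = operation.result()  # waits for the LRO to complete
print(workstation.name)
```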
+ if "create_workstation" not in self._stubs: + self._stubs["create_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/CreateWorkstation", + request_serializer=workstations.CreateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation"] + + @property + def update_workstation( + self, + ) -> Callable[[workstations.UpdateWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the update workstation method over gRPC. + + Updates an existing workstation. + + Returns: + Callable[[~.UpdateWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation" not in self._stubs: + self._stubs["update_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/UpdateWorkstation", + request_serializer=workstations.UpdateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation"] + + @property + def delete_workstation( + self, + ) -> Callable[[workstations.DeleteWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the delete workstation method over gRPC. + + Deletes the specified workstation. + + Returns: + Callable[[~.DeleteWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation" not in self._stubs: + self._stubs["delete_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/DeleteWorkstation", + request_serializer=workstations.DeleteWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation"] + + @property + def start_workstation( + self, + ) -> Callable[[workstations.StartWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the start workstation method over gRPC. + + Starts running a workstation so that users can + connect to it. + + Returns: + Callable[[~.StartWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_workstation" not in self._stubs: + self._stubs["start_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/StartWorkstation", + request_serializer=workstations.StartWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_workstation"] + + @property + def stop_workstation( + self, + ) -> Callable[[workstations.StopWorkstationRequest], operations_pb2.Operation]: + r"""Return a callable for the stop workstation method over gRPC. + + Stops running a workstation, reducing costs. 
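[Editor's note] Start and stop form the cost-control lifecycle mentioned in the docstring above; both are long-running operations through the client. A sketch, assuming the standard flattened `name` parameter and a placeholder resource name:

```python
# Hypothetical usage sketch: stop a workstation when it is idle and
# start it again later; both RPCs are long-running operations.
from google.cloud import workstations_v1beta

client = workstations_v1beta.WorkstationsClient()

name = (
    "projects/my-project/locations/us-central1/workstationClusters/my-cluster"
    "/workstationConfigs/my-config/workstations/my-workstation"
)
client.stop_workstation(name=name).result()   # stop to reduce costs
client.start_workstation(name=name).result()  # resume before connecting
```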
+ + Returns: + Callable[[~.StopWorkstationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_workstation" not in self._stubs: + self._stubs["stop_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/StopWorkstation", + request_serializer=workstations.StopWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_workstation"] + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + workstations.GenerateAccessTokenResponse, + ]: + r"""Return a callable for the generate access token method over gRPC. + + Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + Returns: + Callable[[~.GenerateAccessTokenRequest], + ~.GenerateAccessTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/GenerateAccessToken", + request_serializer=workstations.GenerateAccessTokenRequest.serialize, + response_deserializer=workstations.GenerateAccessTokenResponse.deserialize, + ) + return self._stubs["generate_access_token"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
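[Editor's note] The `generate_access_token` stub above mints a short-lived credential for talking to the workstation itself. A sketch of one plausible use; the `access_token` response field and the workstation host URL are assumptions inferred from the RPC, not confirmed by this diff:

```python
# Hypothetical usage sketch: mint a short-lived token for a workstation
# and attach it as a Bearer credential. The access_token field name is
# an assumption based on the RPC; the host URL is a placeholder.
import requests

from google.cloud import workstations_v1beta

client = workstations_v1beta.WorkstationsClient()

workstation = (
    "projects/my-project/locations/us-central1/workstationClusters/my-cluster"
    "/workstationConfigs/my-config/workstations/my-workstation"
)
response = client.generate_access_token(workstation=workstation)

resp = requests.get(
    "https://my-workstation.example.dev/",
    headers={"Authorization": f"Bearer {response.access_token}"},
)
print(resp.status_code)
```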
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. 
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("WorkstationsGrpcTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/grpc_asyncio.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/grpc_asyncio.py new file mode 100644 index 000000000000..7ba94bbee138 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/grpc_asyncio.py @@ -0,0 +1,996 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.workstations_v1beta.types import workstations + +from .base import DEFAULT_CLIENT_INFO, WorkstationsTransport +from .grpc import WorkstationsGrpcTransport + + +class WorkstationsGrpcAsyncIOTransport(WorkstationsTransport): + """gRPC AsyncIO backend transport for Workstations. + + Service for interacting with Cloud Workstations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "workstations.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "workstations.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
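[Editor's note] Most callers never construct this AsyncIO transport directly; it is created implicitly by `WorkstationsAsyncClient`, and every RPC becomes an awaitable coroutine. A sketch with placeholder resource names:

```python
# Hypothetical usage sketch: the AsyncIO transport is normally created
# implicitly by WorkstationsAsyncClient; every RPC coroutine is awaited.
import asyncio

from google.cloud import workstations_v1beta


async def main() -> None:
    client = workstations_v1beta.WorkstationsAsyncClient()
    name = (
        "projects/my-project/locations/us-central1"
        "/workstationClusters/my-cluster"
    )
    cluster = await client.get_workstation_cluster(name=name)
    print(cluster.name)


asyncio.run(main())
```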
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsAsyncClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def get_workstation_cluster(
+        self,
+    ) -> Callable[
+        [workstations.GetWorkstationClusterRequest],
+        Awaitable[workstations.WorkstationCluster],
+    ]:
+        r"""Return a callable for the get workstation cluster method over gRPC.
+
+        Returns the requested workstation cluster.
+
+        Returns:
+            Callable[[~.GetWorkstationClusterRequest],
+                    Awaitable[~.WorkstationCluster]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_workstation_cluster" not in self._stubs:
+            self._stubs["get_workstation_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.workstations.v1beta.Workstations/GetWorkstationCluster",
+                request_serializer=workstations.GetWorkstationClusterRequest.serialize,
+                response_deserializer=workstations.WorkstationCluster.deserialize,
+            )
+        return self._stubs["get_workstation_cluster"]
+
+    @property
+    def list_workstation_clusters(
+        self,
+    ) -> Callable[
+        [workstations.ListWorkstationClustersRequest],
+        Awaitable[workstations.ListWorkstationClustersResponse],
+    ]:
+        r"""Return a callable for the list workstation clusters method over gRPC.
+
+        Returns all workstation clusters in the specified
+        location.
+
+        Returns:
+            Callable[[~.ListWorkstationClustersRequest],
+                    Awaitable[~.ListWorkstationClustersResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_workstation_clusters" not in self._stubs:
+            self._stubs["list_workstation_clusters"] = self.grpc_channel.unary_unary(
+                "/google.cloud.workstations.v1beta.Workstations/ListWorkstationClusters",
+                request_serializer=workstations.ListWorkstationClustersRequest.serialize,
+                response_deserializer=workstations.ListWorkstationClustersResponse.deserialize,
+            )
+        return self._stubs["list_workstation_clusters"]
+
+    @property
+    def create_workstation_cluster(
+        self,
+    ) -> Callable[
+        [workstations.CreateWorkstationClusterRequest],
+        Awaitable[operations_pb2.Operation],
+    ]:
+        r"""Return a callable for the create workstation cluster method over gRPC.
+
+        Creates a new workstation cluster.
+
+        Returns:
+            Callable[[~.CreateWorkstationClusterRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
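[Editor's note] The `operations_client` property above exposes a cached `OperationsAsyncClient` for polling raw long-running operations at the transport level. A sketch; the `client.transport` accessor is the standard GAPIC one, and the operation name is a placeholder:

```python
# Hypothetical sketch: the transport's cached OperationsAsyncClient can
# poll raw long-running operations by name. The operation name is a
# placeholder of the usual shape.
import asyncio

from google.cloud import workstations_v1beta


async def main() -> None:
    client = workstations_v1beta.WorkstationsAsyncClient()
    transport = client.transport  # WorkstationsGrpcAsyncIOTransport

    op = await transport.operations_client.get_operation(
        "projects/my-project/locations/us-central1/operations/operation-123"
    )
    print(op.done)


asyncio.run(main())
```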
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workstation_cluster" not in self._stubs: + self._stubs["create_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/CreateWorkstationCluster", + request_serializer=workstations.CreateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_cluster"] + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update workstation cluster method over gRPC. + + Updates an existing workstation cluster. + + Returns: + Callable[[~.UpdateWorkstationClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_cluster" not in self._stubs: + self._stubs["update_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/UpdateWorkstationCluster", + request_serializer=workstations.UpdateWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_cluster"] + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete workstation cluster method over gRPC. + + Deletes the specified workstation cluster. + + Returns: + Callable[[~.DeleteWorkstationClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_cluster" not in self._stubs: + self._stubs["delete_workstation_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/DeleteWorkstationCluster", + request_serializer=workstations.DeleteWorkstationClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_cluster"] + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], + Awaitable[workstations.WorkstationConfig], + ]: + r"""Return a callable for the get workstation config method over gRPC. + + Returns the requested workstation configuration. + + Returns: + Callable[[~.GetWorkstationConfigRequest], + Awaitable[~.WorkstationConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_workstation_config" not in self._stubs: + self._stubs["get_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/GetWorkstationConfig", + request_serializer=workstations.GetWorkstationConfigRequest.serialize, + response_deserializer=workstations.WorkstationConfig.deserialize, + ) + return self._stubs["get_workstation_config"] + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + Awaitable[workstations.ListWorkstationConfigsResponse], + ]: + r"""Return a callable for the list workstation configs method over gRPC. + + Returns all workstation configurations in the + specified cluster. + + Returns: + Callable[[~.ListWorkstationConfigsRequest], + Awaitable[~.ListWorkstationConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstation_configs" not in self._stubs: + self._stubs["list_workstation_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListWorkstationConfigs", + request_serializer=workstations.ListWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_workstation_configs"] + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + Awaitable[workstations.ListUsableWorkstationConfigsResponse], + ]: + r"""Return a callable for the list usable workstation + configs method over gRPC. + + Returns all workstation configurations in the + specified cluster on which the caller has the + "workstations.workstation.create" permission. + + Returns: + Callable[[~.ListUsableWorkstationConfigsRequest], + Awaitable[~.ListUsableWorkstationConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstation_configs" not in self._stubs: + self._stubs[ + "list_usable_workstation_configs" + ] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListUsableWorkstationConfigs", + request_serializer=workstations.ListUsableWorkstationConfigsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationConfigsResponse.deserialize, + ) + return self._stubs["list_usable_workstation_configs"] + + @property + def create_workstation_config( + self, + ) -> Callable[ + [workstations.CreateWorkstationConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create workstation config method over gRPC. + + Creates a new workstation configuration. + + Returns: + Callable[[~.CreateWorkstationConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workstation_config" not in self._stubs: + self._stubs["create_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/CreateWorkstationConfig", + request_serializer=workstations.CreateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation_config"] + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update workstation config method over gRPC. + + Updates an existing workstation configuration. + + Returns: + Callable[[~.UpdateWorkstationConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation_config" not in self._stubs: + self._stubs["update_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/UpdateWorkstationConfig", + request_serializer=workstations.UpdateWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation_config"] + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete workstation config method over gRPC. + + Deletes the specified workstation configuration. + + Returns: + Callable[[~.DeleteWorkstationConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation_config" not in self._stubs: + self._stubs["delete_workstation_config"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/DeleteWorkstationConfig", + request_serializer=workstations.DeleteWorkstationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation_config"] + + @property + def get_workstation( + self, + ) -> Callable[ + [workstations.GetWorkstationRequest], Awaitable[workstations.Workstation] + ]: + r"""Return a callable for the get workstation method over gRPC. + + Returns the requested workstation. + + Returns: + Callable[[~.GetWorkstationRequest], + Awaitable[~.Workstation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_workstation" not in self._stubs: + self._stubs["get_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/GetWorkstation", + request_serializer=workstations.GetWorkstationRequest.serialize, + response_deserializer=workstations.Workstation.deserialize, + ) + return self._stubs["get_workstation"] + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], + Awaitable[workstations.ListWorkstationsResponse], + ]: + r"""Return a callable for the list workstations method over gRPC. + + Returns all Workstations using the specified + workstation configuration. + + Returns: + Callable[[~.ListWorkstationsRequest], + Awaitable[~.ListWorkstationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workstations" not in self._stubs: + self._stubs["list_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListWorkstations", + request_serializer=workstations.ListWorkstationsRequest.serialize, + response_deserializer=workstations.ListWorkstationsResponse.deserialize, + ) + return self._stubs["list_workstations"] + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + Awaitable[workstations.ListUsableWorkstationsResponse], + ]: + r"""Return a callable for the list usable workstations method over gRPC. + + Returns all workstations using the specified + workstation configuration on which the caller has the + "workstations.workstations.use" permission. + + Returns: + Callable[[~.ListUsableWorkstationsRequest], + Awaitable[~.ListUsableWorkstationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_workstations" not in self._stubs: + self._stubs["list_usable_workstations"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/ListUsableWorkstations", + request_serializer=workstations.ListUsableWorkstationsRequest.serialize, + response_deserializer=workstations.ListUsableWorkstationsResponse.deserialize, + ) + return self._stubs["list_usable_workstations"] + + @property + def create_workstation( + self, + ) -> Callable[ + [workstations.CreateWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create workstation method over gRPC. + + Creates a new workstation. + + Returns: + Callable[[~.CreateWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
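[Editor's note] Unlike the sync pager shown earlier, the async client's list methods return an async pager, so results are consumed with `async for`. A sketch with placeholder resource names:

```python
# Hypothetical usage sketch: async list methods return an async pager,
# so results stream with `async for`. Resource names are placeholders.
import asyncio

from google.cloud import workstations_v1beta


async def main() -> None:
    client = workstations_v1beta.WorkstationsAsyncClient()
    parent = (
        "projects/my-project/locations/us-central1"
        "/workstationClusters/my-cluster/workstationConfigs/my-config"
    )
    async for workstation in await client.list_workstations(parent=parent):
        print(workstation.name)


asyncio.run(main())
```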
+ if "create_workstation" not in self._stubs: + self._stubs["create_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/CreateWorkstation", + request_serializer=workstations.CreateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workstation"] + + @property + def update_workstation( + self, + ) -> Callable[ + [workstations.UpdateWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update workstation method over gRPC. + + Updates an existing workstation. + + Returns: + Callable[[~.UpdateWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workstation" not in self._stubs: + self._stubs["update_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/UpdateWorkstation", + request_serializer=workstations.UpdateWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_workstation"] + + @property + def delete_workstation( + self, + ) -> Callable[ + [workstations.DeleteWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete workstation method over gRPC. + + Deletes the specified workstation. + + Returns: + Callable[[~.DeleteWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workstation" not in self._stubs: + self._stubs["delete_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/DeleteWorkstation", + request_serializer=workstations.DeleteWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_workstation"] + + @property + def start_workstation( + self, + ) -> Callable[ + [workstations.StartWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the start workstation method over gRPC. + + Starts running a workstation so that users can + connect to it. + + Returns: + Callable[[~.StartWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_workstation" not in self._stubs: + self._stubs["start_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/StartWorkstation", + request_serializer=workstations.StartWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_workstation"] + + @property + def stop_workstation( + self, + ) -> Callable[ + [workstations.StopWorkstationRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the stop workstation method over gRPC. 
+ + Stops running a workstation, reducing costs. + + Returns: + Callable[[~.StopWorkstationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_workstation" not in self._stubs: + self._stubs["stop_workstation"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/StopWorkstation", + request_serializer=workstations.StopWorkstationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_workstation"] + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + Awaitable[workstations.GenerateAccessTokenResponse], + ]: + r"""Return a callable for the generate access token method over gRPC. + + Returns a short-lived credential that can be used to + send authenticated and authorized traffic to a + workstation. + + Returns: + Callable[[~.GenerateAccessTokenRequest], + Awaitable[~.GenerateAccessTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self.grpc_channel.unary_unary( + "/google.cloud.workstations.v1beta.Workstations/GenerateAccessToken", + request_serializer=workstations.GenerateAccessTokenRequest.serialize, + response_deserializer=workstations.GenerateAccessTokenResponse.deserialize, + ) + return self._stubs["generate_access_token"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("WorkstationsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/rest.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/rest.py new file mode 100644 index 000000000000..43bf39b9bd14 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/services/workstations/transports/rest.py @@ -0,0 +1,3602 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
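[Editor's note] The file added below provides a REST (HTTP/JSON) fallback transport alongside the gRPC ones. Selecting it is a client constructor option, as is standard for GAPIC clients that ship a REST transport; a sketch with a placeholder resource name:

```python
# Hypothetical usage sketch: choosing the REST transport instead of the
# default gRPC one when constructing the client.
from google.cloud import workstations_v1beta

client = workstations_v1beta.WorkstationsClient(transport="rest")

name = (
    "projects/my-project/locations/us-central1"
    "/workstationClusters/my-cluster"
)
cluster = client.get_workstation_cluster(name=name)
print(cluster.name)
```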
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.workstations_v1beta.types import workstations + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import WorkstationsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class WorkstationsRestInterceptor: + """Interceptor for Workstations. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the WorkstationsRestTransport. + + .. 
code-block:: python + class MyCustomWorkstationsInterceptor(WorkstationsRestInterceptor): + def pre_create_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_access_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_access_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_usable_workstation_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable_workstation_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_usable_workstations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable_workstations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workstation_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workstation_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workstation_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_list_workstation_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workstations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workstations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workstation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workstation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workstation_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workstation_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workstation_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workstation_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = WorkstationsRestTransport(interceptor=MyCustomWorkstationsInterceptor()) + client = WorkstationsClient(transport=transport) + + + """ + + def pre_create_workstation( + self, + request: workstations.CreateWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.CreateWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_create_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_create_workstation_cluster( + self, + request: workstations.CreateWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.CreateWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_create_workstation_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
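+
+        For example, a hypothetical subclass might log the name of the
+        returned long-running operation before handing it back, e.g.
+        ``logging.info("LRO started: %s", response.name)``.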
+ """ + return response + + def pre_create_workstation_config( + self, + request: workstations.CreateWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.CreateWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_create_workstation_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_delete_workstation( + self, + request: workstations.DeleteWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.DeleteWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_delete_workstation_cluster( + self, + request: workstations.DeleteWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.DeleteWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_workstation_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_delete_workstation_config( + self, + request: workstations.DeleteWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.DeleteWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_workstation_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_generate_access_token( + self, + request: workstations.GenerateAccessTokenRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GenerateAccessTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_access_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. 
+ """ + return request, metadata + + def post_generate_access_token( + self, response: workstations.GenerateAccessTokenResponse + ) -> workstations.GenerateAccessTokenResponse: + """Post-rpc interceptor for generate_access_token + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_workstation( + self, + request: workstations.GetWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GetWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_workstation( + self, response: workstations.Workstation + ) -> workstations.Workstation: + """Post-rpc interceptor for get_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_workstation_cluster( + self, + request: workstations.GetWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GetWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_workstation_cluster( + self, response: workstations.WorkstationCluster + ) -> workstations.WorkstationCluster: + """Post-rpc interceptor for get_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_workstation_config( + self, + request: workstations.GetWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.GetWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_workstation_config( + self, response: workstations.WorkstationConfig + ) -> workstations.WorkstationConfig: + """Post-rpc interceptor for get_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_usable_workstation_configs( + self, + request: workstations.ListUsableWorkstationConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workstations.ListUsableWorkstationConfigsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_usable_workstation_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_usable_workstation_configs( + self, response: workstations.ListUsableWorkstationConfigsResponse + ) -> workstations.ListUsableWorkstationConfigsResponse: + """Post-rpc interceptor for list_usable_workstation_configs + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
+ """ + return response + + def pre_list_usable_workstations( + self, + request: workstations.ListUsableWorkstationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListUsableWorkstationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_usable_workstations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_usable_workstations( + self, response: workstations.ListUsableWorkstationsResponse + ) -> workstations.ListUsableWorkstationsResponse: + """Post-rpc interceptor for list_usable_workstations + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_workstation_clusters( + self, + request: workstations.ListWorkstationClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListWorkstationClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workstation_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_workstation_clusters( + self, response: workstations.ListWorkstationClustersResponse + ) -> workstations.ListWorkstationClustersResponse: + """Post-rpc interceptor for list_workstation_clusters + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_workstation_configs( + self, + request: workstations.ListWorkstationConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListWorkstationConfigsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workstation_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_workstation_configs( + self, response: workstations.ListWorkstationConfigsResponse + ) -> workstations.ListWorkstationConfigsResponse: + """Post-rpc interceptor for list_workstation_configs + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_workstations( + self, + request: workstations.ListWorkstationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.ListWorkstationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workstations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_workstations( + self, response: workstations.ListWorkstationsResponse + ) -> workstations.ListWorkstationsResponse: + """Post-rpc interceptor for list_workstations + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
+ """ + return response + + def pre_start_workstation( + self, + request: workstations.StartWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.StartWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_start_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for start_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_stop_workstation( + self, + request: workstations.StopWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.StopWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_stop_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for stop_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_update_workstation( + self, + request: workstations.UpdateWorkstationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.UpdateWorkstationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_workstation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_update_workstation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_workstation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_update_workstation_cluster( + self, + request: workstations.UpdateWorkstationClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.UpdateWorkstationClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_workstation_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_update_workstation_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_workstation_cluster + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_update_workstation_config( + self, + request: workstations.UpdateWorkstationConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[workstations.UpdateWorkstationConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_workstation_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. 
+ """ + return request, metadata + + def post_update_workstation_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_workstation_config + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Workstations server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Workstations server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class WorkstationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: WorkstationsRestInterceptor + + +class WorkstationsRestTransport(WorkstationsTransport): + """REST backend transport for Workstations. + + Service for interacting with Cloud Workstations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "workstations.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[WorkstationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or WorkstationsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
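+        # A minimal usage sketch (the ``transport`` name is hypothetical):
+        # because of the caching below, repeated property reads reuse a
+        # single client, e.g.
+        #     ops = transport.operations_client   # built on first access
+        #     assert ops is transport.operations_client  # same cached object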
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("CreateWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workstationId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.CreateWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workstation method over HTTP. + + Args: + request (~.workstations.CreateWorkstationRequest): + The request object. Message for creating a + CreateWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
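+
+                As a sketch of typical usage, this operation can be polled
+                via the transport's ``operations_client``; for this RPC the
+                finished operation's ``response`` field is expected to
+                unpack to a ``Workstation`` message.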
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations", + "body": "workstation", + }, + ] + request, metadata = self._interceptor.pre_create_workstation( + request, metadata + ) + pb_request = workstations.CreateWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workstation(resp) + return resp + + class _CreateWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("CreateWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workstationClusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.CreateWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workstation + cluster method over HTTP. + + Args: + request (~.workstations.CreateWorkstationClusterRequest): + The request object. Message for creating a + CreateWorkstationCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/workstationClusters", + "body": "workstation_cluster", + }, + ] + request, metadata = self._interceptor.pre_create_workstation_cluster( + request, metadata + ) + pb_request = workstations.CreateWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workstation_cluster(resp) + return resp + + class _CreateWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("CreateWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workstationConfigId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.CreateWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workstation config method over HTTP. + + Args: + request (~.workstations.CreateWorkstationConfigRequest): + The request object. Message for creating a + CreateWorkstationConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs", + "body": "workstation_config", + }, + ] + request, metadata = self._interceptor.pre_create_workstation_config( + request, metadata + ) + pb_request = workstations.CreateWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workstation_config(resp) + return resp + + class _DeleteWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("DeleteWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.DeleteWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete workstation method over HTTP. + + Args: + request (~.workstations.DeleteWorkstationRequest): + The request object. Request message for + DeleteWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workstation( + request, metadata + ) + pb_request = workstations.DeleteWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_workstation(resp) + return resp + + class _DeleteWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("DeleteWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.DeleteWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete workstation + cluster method over HTTP. + + Args: + request (~.workstations.DeleteWorkstationClusterRequest): + The request object. Message for deleting a workstation + cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workstation_cluster( + request, metadata + ) + pb_request = workstations.DeleteWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_workstation_cluster(resp) + return resp + + class _DeleteWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("DeleteWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.DeleteWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete workstation config method over HTTP. + + Args: + request (~.workstations.DeleteWorkstationConfigRequest): + The request object. Message for deleting a workstation + configuration. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workstation_config( + request, metadata + ) + pb_request = workstations.DeleteWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_workstation_config(resp) + return resp + + class _GenerateAccessToken(WorkstationsRestStub): + def __hash__(self): + return hash("GenerateAccessToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GenerateAccessTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.GenerateAccessTokenResponse: + r"""Call the generate access token method over HTTP. + + Args: + request (~.workstations.GenerateAccessTokenRequest): + The request object. Request message for + GenerateAccessToken. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.GenerateAccessTokenResponse: + Response message for + GenerateAccessToken. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{workstation=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:generateAccessToken", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_access_token( + request, metadata + ) + pb_request = workstations.GenerateAccessTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.GenerateAccessTokenResponse() + pb_resp = workstations.GenerateAccessTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_access_token(resp) + return resp + + class _GetWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("GetWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GetWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.Workstation: + r"""Call the get workstation method over HTTP. + + Args: + request (~.workstations.GetWorkstationRequest): + The request object. Request message for GetWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.Workstation: + A single instance of a developer + workstation with its own persistent + storage. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workstation(request, metadata) + pb_request = workstations.GetWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.Workstation() + pb_resp = workstations.Workstation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workstation(resp) + return resp + + class _GetWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("GetWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GetWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationCluster: + r"""Call the get workstation cluster method over HTTP. + + Args: + request (~.workstations.GetWorkstationClusterRequest): + The request object. Request message for + GetWorkstationCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.WorkstationCluster: + A grouping of workstation + configurations and the associated + workstations in that region. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workstation_cluster( + request, metadata + ) + pb_request = workstations.GetWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.WorkstationCluster() + pb_resp = workstations.WorkstationCluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workstation_cluster(resp) + return resp + + class _GetWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("GetWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.GetWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.WorkstationConfig: + r"""Call the get workstation config method over HTTP. + + Args: + request (~.workstations.GetWorkstationConfigRequest): + The request object. Request message for + GetWorkstationConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.WorkstationConfig: + A set of configuration options + describing how a workstation will be + run. Workstation configurations are + intended to be shared across multiple + workstations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workstation_config( + request, metadata + ) + pb_request = workstations.GetWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.WorkstationConfig() + pb_resp = workstations.WorkstationConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workstation_config(resp) + return resp + + class _ListUsableWorkstationConfigs(WorkstationsRestStub): + def __hash__(self): + return hash("ListUsableWorkstationConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListUsableWorkstationConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListUsableWorkstationConfigsResponse: + r"""Call the list usable workstation + configs method over HTTP. + + Args: + request (~.workstations.ListUsableWorkstationConfigsRequest): + The request object. Request message for + ListUsableWorkstationConfigs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListUsableWorkstationConfigsResponse: + Response message for + ListUsableWorkstationConfigs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs:listUsable", + }, + ] + request, metadata = self._interceptor.pre_list_usable_workstation_configs( + request, metadata + ) + pb_request = workstations.ListUsableWorkstationConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListUsableWorkstationConfigsResponse() + pb_resp = workstations.ListUsableWorkstationConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_usable_workstation_configs(resp) + return resp + + class _ListUsableWorkstations(WorkstationsRestStub): + def __hash__(self): + return hash("ListUsableWorkstations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListUsableWorkstationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListUsableWorkstationsResponse: + r"""Call the list usable workstations method over HTTP. + + Args: + request (~.workstations.ListUsableWorkstationsRequest): + The request object. Request message for + ListUsableWorkstations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListUsableWorkstationsResponse: + Response message for + ListUsableWorkstations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations:listUsable", + }, + ] + request, metadata = self._interceptor.pre_list_usable_workstations( + request, metadata + ) + pb_request = workstations.ListUsableWorkstationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListUsableWorkstationsResponse() + pb_resp = workstations.ListUsableWorkstationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_usable_workstations(resp) + return resp + + class _ListWorkstationClusters(WorkstationsRestStub): + def __hash__(self): + return hash("ListWorkstationClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListWorkstationClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListWorkstationClustersResponse: + r"""Call the list workstation clusters method over HTTP. + + Args: + request (~.workstations.ListWorkstationClustersRequest): + The request object. Request message for + ListWorkstationClusters. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListWorkstationClustersResponse: + Response message for + ListWorkstationClusters. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*}/workstationClusters", + }, + ] + request, metadata = self._interceptor.pre_list_workstation_clusters( + request, metadata + ) + pb_request = workstations.ListWorkstationClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListWorkstationClustersResponse() + pb_resp = workstations.ListWorkstationClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workstation_clusters(resp) + return resp + + class _ListWorkstationConfigs(WorkstationsRestStub): + def __hash__(self): + return hash("ListWorkstationConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListWorkstationConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListWorkstationConfigsResponse: + r"""Call the list workstation configs method over HTTP. + + Args: + request (~.workstations.ListWorkstationConfigsRequest): + The request object. Request message for + ListWorkstationConfigs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListWorkstationConfigsResponse: + Response message for + ListWorkstationConfigs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs", + }, + ] + request, metadata = self._interceptor.pre_list_workstation_configs( + request, metadata + ) + pb_request = workstations.ListWorkstationConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListWorkstationConfigsResponse() + pb_resp = workstations.ListWorkstationConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workstation_configs(resp) + return resp + + class _ListWorkstations(WorkstationsRestStub): + def __hash__(self): + return hash("ListWorkstations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.ListWorkstationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workstations.ListWorkstationsResponse: + r"""Call the list workstations method over HTTP. + + Args: + request (~.workstations.ListWorkstationsRequest): + The request object. Request message for ListWorkstations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workstations.ListWorkstationsResponse: + Response message for + ListWorkstations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations", + }, + ] + request, metadata = self._interceptor.pre_list_workstations( + request, metadata + ) + pb_request = workstations.ListWorkstationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workstations.ListWorkstationsResponse() + pb_resp = workstations.ListWorkstationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workstations(resp) + return resp + + class _StartWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("StartWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.StartWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the start workstation method over HTTP. + + Args: + request (~.workstations.StartWorkstationRequest): + The request object. Request message for StartWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:start", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_workstation( + request, metadata + ) + pb_request = workstations.StartWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_workstation(resp) + return resp + + class _StopWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("StopWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.StopWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the stop workstation method over HTTP. + + Args: + request (~.workstations.StopWorkstationRequest): + The request object. Request message for StopWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:stop", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stop_workstation( + request, metadata + ) + pb_request = workstations.StopWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_workstation(resp) + return resp + + class _UpdateWorkstation(WorkstationsRestStub): + def __hash__(self): + return hash("UpdateWorkstation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.UpdateWorkstationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update workstation method over HTTP. + + Args: + request (~.workstations.UpdateWorkstationRequest): + The request object. Request message for + UpdateWorkstation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{workstation.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}", + "body": "workstation", + }, + ] + request, metadata = self._interceptor.pre_update_workstation( + request, metadata + ) + pb_request = workstations.UpdateWorkstationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workstation(resp) + return resp + + class _UpdateWorkstationCluster(WorkstationsRestStub): + def __hash__(self): + return hash("UpdateWorkstationCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.UpdateWorkstationClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update workstation + cluster method over HTTP. + + Args: + request (~.workstations.UpdateWorkstationClusterRequest): + The request object. Request message for + UpdateWorkstationCluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{workstation_cluster.name=projects/*/locations/*/workstationClusters/*}", + "body": "workstation_cluster", + }, + ] + request, metadata = self._interceptor.pre_update_workstation_cluster( + request, metadata + ) + pb_request = workstations.UpdateWorkstationClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workstation_cluster(resp) + return resp + + class _UpdateWorkstationConfig(WorkstationsRestStub): + def __hash__(self): + return hash("UpdateWorkstationConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workstations.UpdateWorkstationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update workstation config method over HTTP. + + Args: + request (~.workstations.UpdateWorkstationConfigRequest): + The request object. Request message for + UpdateWorkstationConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{workstation_config.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}", + "body": "workstation_config", + }, + ] + request, metadata = self._interceptor.pre_update_workstation_config( + request, metadata + ) + pb_request = workstations.UpdateWorkstationConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workstation_config(resp) + return resp + + @property + def create_workstation( + self, + ) -> Callable[[workstations.CreateWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_workstation_cluster( + self, + ) -> Callable[ + [workstations.CreateWorkstationClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_workstation_config( + self, + ) -> Callable[ + [workstations.CreateWorkstationConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workstation( + self, + ) -> Callable[[workstations.DeleteWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workstation_cluster( + self, + ) -> Callable[ + [workstations.DeleteWorkstationClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workstation_config( + self, + ) -> Callable[ + [workstations.DeleteWorkstationConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_access_token( + self, + ) -> Callable[ + [workstations.GenerateAccessTokenRequest], + workstations.GenerateAccessTokenResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workstation( + self, + ) -> Callable[[workstations.GetWorkstationRequest], workstations.Workstation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workstation_cluster( + self, + ) -> Callable[ + [workstations.GetWorkstationClusterRequest], workstations.WorkstationCluster + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workstation_config( + self, + ) -> Callable[ + [workstations.GetWorkstationConfigRequest], workstations.WorkstationConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_usable_workstation_configs( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationConfigsRequest], + workstations.ListUsableWorkstationConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsableWorkstationConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_usable_workstations( + self, + ) -> Callable[ + [workstations.ListUsableWorkstationsRequest], + workstations.ListUsableWorkstationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsableWorkstations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workstation_clusters( + self, + ) -> Callable[ + [workstations.ListWorkstationClustersRequest], + workstations.ListWorkstationClustersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListWorkstationClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workstation_configs( + self, + ) -> Callable[ + [workstations.ListWorkstationConfigsRequest], + workstations.ListWorkstationConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkstationConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workstations( + self, + ) -> Callable[ + [workstations.ListWorkstationsRequest], workstations.ListWorkstationsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkstations(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_workstation( + self, + ) -> Callable[[workstations.StartWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_workstation( + self, + ) -> Callable[[workstations.StopWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workstation( + self, + ) -> Callable[[workstations.UpdateWorkstationRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkstation(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workstation_cluster( + self, + ) -> Callable[ + [workstations.UpdateWorkstationClusterRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkstationCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workstation_config( + self, + ) -> Callable[ + [workstations.UpdateWorkstationConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkstationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(WorkstationsRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1beta/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:getIamPolicy", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(WorkstationsRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
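+
+            Example (illustrative sketch; the resource name and binding are
+            placeholders)::
+
+                from google.iam.v1 import iam_policy_pb2, policy_pb2
+
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="projects/my-project/locations/us-central1/workstationClusters/my-cluster/workstationConfigs/my-config",
+                    policy=policy_pb2.Policy(
+                        bindings=[
+                            policy_pb2.Binding(
+                                role="roles/workstations.user",
+                                members=["user:alice@example.com"],
+                            )
+                        ]
+                    ),
+                )
+                policy = transport.set_iam_policy(request)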
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(WorkstationsRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{resource=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
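+
+            Example (illustrative; the operation name is a placeholder)::
+
+                request = operations_pb2.GetOperationRequest(
+                    name="projects/my-project/locations/us-central1/operations/operation-123",
+                )
+                operation = transport.get_operation(request)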
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(WorkstationsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("WorkstationsRestTransport",) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/types/__init__.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/types/__init__.py new file mode 100644 index 000000000000..2ed11b1f4e97 --- /dev/null +++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/types/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .workstations import ( + CreateWorkstationClusterRequest, + CreateWorkstationConfigRequest, + CreateWorkstationRequest, + DeleteWorkstationClusterRequest, + DeleteWorkstationConfigRequest, + DeleteWorkstationRequest, + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GetWorkstationClusterRequest, + GetWorkstationConfigRequest, + GetWorkstationRequest, + ListUsableWorkstationConfigsRequest, + ListUsableWorkstationConfigsResponse, + ListUsableWorkstationsRequest, + ListUsableWorkstationsResponse, + ListWorkstationClustersRequest, + ListWorkstationClustersResponse, + ListWorkstationConfigsRequest, + ListWorkstationConfigsResponse, + ListWorkstationsRequest, + ListWorkstationsResponse, + OperationMetadata, + StartWorkstationRequest, + StopWorkstationRequest, + UpdateWorkstationClusterRequest, + UpdateWorkstationConfigRequest, + UpdateWorkstationRequest, + Workstation, + WorkstationCluster, + WorkstationConfig, +) + +__all__ = ( + "CreateWorkstationClusterRequest", + "CreateWorkstationConfigRequest", + "CreateWorkstationRequest", + "DeleteWorkstationClusterRequest", + "DeleteWorkstationConfigRequest", + "DeleteWorkstationRequest", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GetWorkstationClusterRequest", + "GetWorkstationConfigRequest", + "GetWorkstationRequest", + "ListUsableWorkstationConfigsRequest", + "ListUsableWorkstationConfigsResponse", + "ListUsableWorkstationsRequest", + "ListUsableWorkstationsResponse", + "ListWorkstationClustersRequest", + "ListWorkstationClustersResponse", + "ListWorkstationConfigsRequest", + "ListWorkstationConfigsResponse", + "ListWorkstationsRequest", + "ListWorkstationsResponse", + "OperationMetadata", + "StartWorkstationRequest", + "StopWorkstationRequest", + "UpdateWorkstationClusterRequest", + "UpdateWorkstationConfigRequest", + "UpdateWorkstationRequest", + "Workstation", + "WorkstationCluster", + "WorkstationConfig", +) diff --git a/packages/google-cloud-workstations/google/cloud/workstations_v1beta/types/workstations.py b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/types/workstations.py new file mode 100644 index 
000000000000..e994acef6b44
--- /dev/null
+++ b/packages/google-cloud-workstations/google/cloud/workstations_v1beta/types/workstations.py
@@ -0,0 +1,1795 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.workstations.v1beta",
+    manifest={
+        "WorkstationCluster",
+        "WorkstationConfig",
+        "Workstation",
+        "GetWorkstationClusterRequest",
+        "ListWorkstationClustersRequest",
+        "ListWorkstationClustersResponse",
+        "CreateWorkstationClusterRequest",
+        "UpdateWorkstationClusterRequest",
+        "DeleteWorkstationClusterRequest",
+        "GetWorkstationConfigRequest",
+        "ListWorkstationConfigsRequest",
+        "ListWorkstationConfigsResponse",
+        "ListUsableWorkstationConfigsRequest",
+        "ListUsableWorkstationConfigsResponse",
+        "CreateWorkstationConfigRequest",
+        "UpdateWorkstationConfigRequest",
+        "DeleteWorkstationConfigRequest",
+        "GetWorkstationRequest",
+        "ListWorkstationsRequest",
+        "ListWorkstationsResponse",
+        "ListUsableWorkstationsRequest",
+        "ListUsableWorkstationsResponse",
+        "CreateWorkstationRequest",
+        "UpdateWorkstationRequest",
+        "DeleteWorkstationRequest",
+        "StartWorkstationRequest",
+        "StopWorkstationRequest",
+        "GenerateAccessTokenRequest",
+        "GenerateAccessTokenResponse",
+        "OperationMetadata",
+    },
+)
+
+
+class WorkstationCluster(proto.Message):
+    r"""A grouping of workstation configurations and the associated
+    workstations in that region.
+
+    Attributes:
+        name (str):
+            Full name of this resource.
+        display_name (str):
+            Human-readable name for this resource.
+        uid (str):
+            Output only. A system-assigned unique
+            identifier for this resource.
+        reconciling (bool):
+            Output only. Indicates whether this resource
+            is currently being updated to match its intended
+            state.
+        annotations (MutableMapping[str, str]):
+            Client-specified annotations.
+        labels (MutableMapping[str, str]):
+            Client-specified labels that are applied to
+            the resource and that are also propagated to the
+            underlying Compute Engine resources.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when this resource was
+            created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when this resource was most
+            recently updated.
+        delete_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when this resource was
+            soft-deleted.
+        etag (str):
+            Checksum computed by the server. May be sent
+            on update and delete requests to make sure that
+            the client has an up-to-date value before
+            proceeding.
+        network (str):
+            Immutable. Name of the Compute Engine network
Name of the Compute Engine network + in which instances associated with this cluster + will be created. + subnetwork (str): + Immutable. Name of the Compute Engine + subnetwork in which instances associated with + this cluster will be created. Must be part of + the network specified for this cluster. + control_plane_ip (str): + Output only. The private IP address of the + control plane for this cluster. Workstation VMs + need access to this IP address to work with the + service, so make sure that your firewall rules + allow egress from the workstation VMs to this + address. + private_cluster_config (google.cloud.workstations_v1beta.types.WorkstationCluster.PrivateClusterConfig): + Configuration for private cluster. + degraded (bool): + Output only. Whether this resource is in degraded mode, in + which case it may require user action to restore full + functionality. Details can be found in the ``conditions`` + field. + conditions (MutableSequence[google.rpc.status_pb2.Status]): + Output only. Status conditions describing the + current resource state. + """ + + class PrivateClusterConfig(proto.Message): + r"""Configuration options for private clusters. + + Attributes: + enable_private_endpoint (bool): + Immutable. Whether Workstations endpoint is + private. + cluster_hostname (str): + Output only. Hostname for the workstation + cluster. This field will be populated only when + private endpoint is enabled. To access + workstations in the cluster, create a new DNS + zone mapping this domain name to an internal IP + address and a forwarding rule mapping that + address to the service attachment. + service_attachment_uri (str): + Output only. Service attachment URI for the workstation + cluster. The service attachment is created when private + endpoint is enabled. To access workstations in the cluster, + configure access to the managed service using `Private + Service + Connect `__. + allowed_projects (MutableSequence[str]): + Additional projects that are allowed to + attach to the workstation cluster's service + attachment. By default, the workstation + cluster's project and the VPC host project (if + different) are allowed.
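For orientation, here is a minimal sketch of composing these generated types with the client. It is illustrative only: the project, location, network, and ID values are placeholders, and it assumes `workstations_v1beta` re-exports its types at the package top level, as generated GAPIC packages normally do.

from google.cloud import workstations_v1beta

# A minimal sketch: create a workstation cluster whose control plane is
# reachable only through a Private Service Connect endpoint, as described
# by PrivateClusterConfig above. All resource names are placeholders.
client = workstations_v1beta.WorkstationsClient()

cluster = workstations_v1beta.WorkstationCluster(
    network="projects/my-project/global/networks/my-network",
    subnetwork="projects/my-project/regions/us-central1/subnetworks/my-subnet",
    private_cluster_config=workstations_v1beta.WorkstationCluster.PrivateClusterConfig(
        enable_private_endpoint=True,
        allowed_projects=["my-other-project"],  # extra projects that may attach
    ),
)

operation = client.create_workstation_cluster(
    parent="projects/my-project/locations/us-central1",
    workstation_cluster=cluster,
    workstation_cluster_id="my-cluster",
)
result = operation.result()  # blocks until the long-running operation finishes
print(result.private_cluster_config.cluster_hostname)  # set once the endpoint is up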
+ """ + + enable_private_endpoint: bool = proto.Field( + proto.BOOL, + number=1, + ) + cluster_hostname: str = proto.Field( + proto.STRING, + number=2, + ) + service_attachment_uri: str = proto.Field( + proto.STRING, + number=3, + ) + allowed_projects: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=15, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + network: str = proto.Field( + proto.STRING, + number=10, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=11, + ) + control_plane_ip: str = proto.Field( + proto.STRING, + number=16, + ) + private_cluster_config: PrivateClusterConfig = proto.Field( + proto.MESSAGE, + number=12, + message=PrivateClusterConfig, + ) + degraded: bool = proto.Field( + proto.BOOL, + number=13, + ) + conditions: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message=status_pb2.Status, + ) + + +class WorkstationConfig(proto.Message): + r"""A set of configuration options describing how a workstation + will be run. Workstation configurations are intended to be + shared across multiple workstations. + + Attributes: + name (str): + Full name of this resource. + display_name (str): + Human-readable name for this resource. + uid (str): + Output only. A system-assigned unique + identified for this resource. + reconciling (bool): + Output only. Indicates whether this resource + is currently being updated to match its intended + state. + annotations (MutableMapping[str, str]): + Client-specified annotations. + labels (MutableMapping[str, str]): + Client-specified labels that are applied to + the resource and that are also propagated to the + underlying Compute Engine resources. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was most + recently updated. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + soft-deleted. + etag (str): + Checksum computed by the server. May be sent + on update and delete requests to make sure that + the client has an up-to-date value before + proceeding. + idle_timeout (google.protobuf.duration_pb2.Duration): + How long to wait before automatically + stopping an instance that hasn't received any + user traffic. A value of 0 indicates that this + instance should never time out due to idleness. + Defaults to 20 minutes. + running_timeout (google.protobuf.duration_pb2.Duration): + How long to wait before automatically stopping a workstation + after it started. 
A value of 0 indicates that workstations + using this configuration should never time out. Must be + greater than 0 and less than 24 hours if encryption_key is + set. Defaults to 12 hours. + host (google.cloud.workstations_v1beta.types.WorkstationConfig.Host): + Runtime host for the workstation. + persistent_directories (MutableSequence[google.cloud.workstations_v1beta.types.WorkstationConfig.PersistentDirectory]): + Directories to persist across workstation + sessions. + container (google.cloud.workstations_v1beta.types.WorkstationConfig.Container): + Container that will be run for each + workstation using this configuration when that + workstation is started. + encryption_key (google.cloud.workstations_v1beta.types.WorkstationConfig.CustomerEncryptionKey): + Immutable. Encrypts resources of this + workstation configuration using a + customer-managed encryption key. + If specified, the boot disk of the Compute + Engine instance and the persistent disk are + encrypted using this encryption key. If this + field is not set, the disks are encrypted using + a generated key. Customer-managed encryption + keys do not protect disk metadata. + If the customer-managed encryption key is + rotated, when the workstation instance is + stopped, the system attempts to recreate the + persistent disk with the new version of the key. + Be sure to keep older versions of the key until + the persistent disk is recreated. Otherwise, + data on the persistent disk will be lost. + If the encryption key is revoked, the + workstation session will automatically be + stopped within 7 hours. + + Immutable after the workstation configuration is + created. + readiness_checks (MutableSequence[google.cloud.workstations_v1beta.types.WorkstationConfig.ReadinessCheck]): + Readiness checks to perform when starting a + workstation using this workstation + configuration. Mark a workstation as running + only after all specified readiness checks return + 200 status codes. + degraded (bool): + Output only. Whether this resource is in degraded mode, in + which case it may require user action to restore full + functionality. Details can be found in the ``conditions`` + field. + conditions (MutableSequence[google.rpc.status_pb2.Status]): + Output only. Status conditions describing the + current resource state. + enable_audit_agent (bool): + Whether to enable linux auditd logging on the workstation. + When enabled, a service account must also be specified that + has logging.buckets.write permission on the project. + Operating system audit logging is distinct from `Cloud Audit + Logs `__. + """ + + class Host(proto.Message): + r"""Runtime host for a workstation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gce_instance (google.cloud.workstations_v1beta.types.WorkstationConfig.Host.GceInstance): + Specifies a Compute Engine instance as the + host. + + This field is a member of `oneof`_ ``config``. + """ + + class GceInstance(proto.Message): + r"""A runtime using a Compute Engine instance. + + Attributes: + machine_type (str): + The name of a Compute Engine machine type. + service_account (str): + Email address of the service account used on + VM instances used to support this configuration. + If not set, VMs run with a Google-managed + service account. This service account must have + permission to pull the specified container + image; otherwise, the image must be publicly + accessible. 
+ tags (MutableSequence[str]): + Network tags to add to the Compute Engine + machines backing the Workstations. + pool_size (int): + Number of instances to pool for faster + workstation startup. + pooled_instances (int): + Output only. Number of instances currently + available in the pool for faster workstation + startup. + disable_public_ip_addresses (bool): + Whether instances have no public IP address. + shielded_instance_config (google.cloud.workstations_v1beta.types.WorkstationConfig.Host.GceInstance.GceShieldedInstanceConfig): + A set of Compute Engine Shielded instance + options. + confidential_instance_config (google.cloud.workstations_v1beta.types.WorkstationConfig.Host.GceInstance.GceConfidentialInstanceConfig): + A set of Compute Engine Confidential VM + instance options. + boot_disk_size_gb (int): + Size of the boot disk in GB. Defaults to 50. + accelerators (MutableSequence[google.cloud.workstations_v1beta.types.WorkstationConfig.Host.GceInstance.Accelerator]): + A list of the type and count of accelerator + cards attached to the instance. + """ + + class GceShieldedInstanceConfig(proto.Message): + r"""A set of Compute Engine Shielded instance options. + + Attributes: + enable_secure_boot (bool): + Whether the instance has Secure Boot enabled. + enable_vtpm (bool): + Whether the instance has the vTPM enabled. + enable_integrity_monitoring (bool): + Whether the instance has integrity monitoring + enabled. + """ + + enable_secure_boot: bool = proto.Field( + proto.BOOL, + number=1, + ) + enable_vtpm: bool = proto.Field( + proto.BOOL, + number=2, + ) + enable_integrity_monitoring: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class GceConfidentialInstanceConfig(proto.Message): + r"""A set of Compute Engine Confidential VM instance options. + + Attributes: + enable_confidential_compute (bool): + Whether the instance has confidential compute + enabled. + """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class Accelerator(proto.Message): + r"""An accelerator card attached to the instance. + + Attributes: + type_ (str): + Type of accelerator resource to attach to the + instance, for example, "nvidia-tesla-p100". + count (int): + Number of accelerator cards exposed to the + instance. 
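A minimal sketch of a GceInstance host built from the nested messages above. The machine type, pool size, and accelerator values are illustrative placeholders, not recommendations.

from google.cloud import workstations_v1beta

GceInstance = workstations_v1beta.WorkstationConfig.Host.GceInstance

host = workstations_v1beta.WorkstationConfig.Host(
    gce_instance=GceInstance(
        machine_type="e2-standard-8",  # illustrative machine type
        pool_size=2,                   # instances kept warm for faster startup
        boot_disk_size_gb=50,
        disable_public_ip_addresses=True,
        shielded_instance_config=GceInstance.GceShieldedInstanceConfig(
            enable_secure_boot=True,
            enable_vtpm=True,
            enable_integrity_monitoring=True,
        ),
        accelerators=[
            # `type_` carries a trailing underscore because `type` is reserved.
            GceInstance.Accelerator(type_="nvidia-tesla-p100", count=1)
        ],
    )
)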
+ """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + count: int = proto.Field( + proto.INT32, + number=2, + ) + + machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + service_account: str = proto.Field( + proto.STRING, + number=2, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + pool_size: int = proto.Field( + proto.INT32, + number=5, + ) + pooled_instances: int = proto.Field( + proto.INT32, + number=12, + ) + disable_public_ip_addresses: bool = proto.Field( + proto.BOOL, + number=6, + ) + shielded_instance_config: "WorkstationConfig.Host.GceInstance.GceShieldedInstanceConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="WorkstationConfig.Host.GceInstance.GceShieldedInstanceConfig", + ) + confidential_instance_config: "WorkstationConfig.Host.GceInstance.GceConfidentialInstanceConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="WorkstationConfig.Host.GceInstance.GceConfidentialInstanceConfig", + ) + boot_disk_size_gb: int = proto.Field( + proto.INT32, + number=9, + ) + accelerators: MutableSequence[ + "WorkstationConfig.Host.GceInstance.Accelerator" + ] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="WorkstationConfig.Host.GceInstance.Accelerator", + ) + + gce_instance: "WorkstationConfig.Host.GceInstance" = proto.Field( + proto.MESSAGE, + number=1, + oneof="config", + message="WorkstationConfig.Host.GceInstance", + ) + + class PersistentDirectory(proto.Message): + r"""A directory to persist across workstation sessions. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mount_path (str): + Location of this directory in the running + workstation. + gce_pd (google.cloud.workstations_v1beta.types.WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk): + A PersistentDirectory backed by a Compute + Engine persistent disk. + + This field is a member of `oneof`_ ``directory_type``. + """ + + class GceRegionalPersistentDisk(proto.Message): + r"""A PersistentDirectory backed by a Compute Engine regional + persistent disk. + + Attributes: + size_gb (int): + Size of the disk in GB. Must be empty if source_snapshot is + set. Defaults to 200. + fs_type (str): + Type of file system that the disk should be formatted with. + The workstation image must support this file system type. + Must be empty if source_snapshot is set. Defaults to ext4. + disk_type (str): + Type of the disk to use. Defaults to + pd-standard. + source_snapshot (str): + Name of the snapshot to use as the source for the disk. If + set, size_gb and fs_type must be empty. + reclaim_policy (google.cloud.workstations_v1beta.types.WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk.ReclaimPolicy): + What should happen to the disk after the + workstation is deleted. Defaults to DELETE. + """ + + class ReclaimPolicy(proto.Enum): + r"""Value representing what should happen to the disk after the + workstation is deleted. + + Values: + RECLAIM_POLICY_UNSPECIFIED (0): + Do not use. + DELETE (1): + The persistent disk will be deleted with the + workstation. + RETAIN (2): + The persistent disk will be remain after the + workstation is deleted, and the administrator + must manually delete the disk. 
+ """ + RECLAIM_POLICY_UNSPECIFIED = 0 + DELETE = 1 + RETAIN = 2 + + size_gb: int = proto.Field( + proto.INT32, + number=1, + ) + fs_type: str = proto.Field( + proto.STRING, + number=2, + ) + disk_type: str = proto.Field( + proto.STRING, + number=3, + ) + source_snapshot: str = proto.Field( + proto.STRING, + number=5, + ) + reclaim_policy: "WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk.ReclaimPolicy" = proto.Field( + proto.ENUM, + number=4, + enum="WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk.ReclaimPolicy", + ) + + mount_path: str = proto.Field( + proto.STRING, + number=1, + ) + gce_pd: "WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk" = proto.Field( + proto.MESSAGE, + number=2, + oneof="directory_type", + message="WorkstationConfig.PersistentDirectory.GceRegionalPersistentDisk", + ) + + class Container(proto.Message): + r"""A Docker container. + + Attributes: + image (str): + Docker image defining the container. This + image must be accessible by the service account + specified in the workstation configuration. + command (MutableSequence[str]): + If set, overrides the default ENTRYPOINT + specified by the image. + args (MutableSequence[str]): + Arguments passed to the entrypoint. + env (MutableMapping[str, str]): + Environment variables passed to the + container's entrypoint. + working_dir (str): + If set, overrides the default DIR specified + by the image. + run_as_user (int): + If set, overrides the USER specified in the + image with the given uid. + """ + + image: str = proto.Field( + proto.STRING, + number=1, + ) + command: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + env: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + working_dir: str = proto.Field( + proto.STRING, + number=5, + ) + run_as_user: int = proto.Field( + proto.INT32, + number=6, + ) + + class CustomerEncryptionKey(proto.Message): + r"""A customer-managed encryption key for the Compute Engine + resources of this workstation configuration. + + Attributes: + kms_key (str): + Immutable. The name of the Google Cloud KMS encryption key. + For example, + ``projects/PROJECT_ID/locations/REGION/keyRings/KEY_RING/cryptoKeys/KEY_NAME``. + kms_key_service_account (str): + Immutable. The service account to use with the specified KMS + key. We recommend that you use a separate service account + and follow KMS best practices. For more information, see + `Separation of + duties `__ + and ``gcloud kms keys add-iam-policy-binding`` + ```--member`` `__. + """ + + kms_key: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key_service_account: str = proto.Field( + proto.STRING, + number=2, + ) + + class ReadinessCheck(proto.Message): + r"""A readiness check to be performed on a workstation. + + Attributes: + path (str): + Path to which the request should be sent. + port (int): + Port to which the request should be sent. 
+ """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + idle_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + message=duration_pb2.Duration, + ) + running_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=11, + message=duration_pb2.Duration, + ) + host: Host = proto.Field( + proto.MESSAGE, + number=12, + message=Host, + ) + persistent_directories: MutableSequence[PersistentDirectory] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message=PersistentDirectory, + ) + container: Container = proto.Field( + proto.MESSAGE, + number=14, + message=Container, + ) + encryption_key: CustomerEncryptionKey = proto.Field( + proto.MESSAGE, + number=17, + message=CustomerEncryptionKey, + ) + readiness_checks: MutableSequence[ReadinessCheck] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message=ReadinessCheck, + ) + degraded: bool = proto.Field( + proto.BOOL, + number=15, + ) + conditions: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message=status_pb2.Status, + ) + enable_audit_agent: bool = proto.Field( + proto.BOOL, + number=20, + ) + + +class Workstation(proto.Message): + r"""A single instance of a developer workstation with its own + persistent storage. + + Attributes: + name (str): + Full name of this resource. + display_name (str): + Human-readable name for this resource. + uid (str): + Output only. A system-assigned unique + identified for this resource. + reconciling (bool): + Output only. Indicates whether this resource + is currently being updated to match its intended + state. + annotations (MutableMapping[str, str]): + Client-specified annotations. + labels (MutableMapping[str, str]): + Client-specified labels that are applied to + the resource and that are also propagated to the + underlying Compute Engine resources. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was most + recently updated. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this resource was + soft-deleted. + etag (str): + Checksum computed by the server. May be sent + on update and delete requests to make sure that + the client has an up-to-date value before + proceeding. + state (google.cloud.workstations_v1beta.types.Workstation.State): + Output only. Current state of the + workstation. + host (str): + Output only. 
Host to which clients can send HTTPS traffic + that will be received by the workstation. Authorized traffic + will be delivered to the workstation as HTTP on port 80. To + send traffic to a different port, clients may prefix the + host with the destination port in the format + ``{port}-{host}``. + env (MutableMapping[str, str]): + Environment variables passed to the + workstation container's entrypoint. + """ + + class State(proto.Enum): + r"""Whether a workstation is running and ready to receive user + requests. + + Values: + STATE_UNSPECIFIED (0): + Do not use. + STATE_STARTING (1): + The workstation is not yet ready to accept + requests from users but will be soon. + STATE_RUNNING (2): + The workstation is ready to accept requests + from users. + STATE_STOPPING (3): + The workstation is being stopped. + STATE_STOPPED (4): + The workstation is stopped and will not be + able to receive requests until it is started. + """ + STATE_UNSPECIFIED = 0 + STATE_STARTING = 1 + STATE_RUNNING = 2 + STATE_STOPPING = 3 + STATE_STOPPED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + host: str = proto.Field( + proto.STRING, + number=11, + ) + env: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=12, + ) + + +class GetWorkstationClusterRequest(proto.Message): + r"""Request message for GetWorkstationCluster. + + Attributes: + name (str): + Required. Name of the requested resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListWorkstationClustersRequest(proto.Message): + r"""Request message for ListWorkstationClusters. + + Attributes: + parent (str): + Required. Parent resource name. + page_size (int): + Maximum number of items to return. + page_token (str): + next_page_token value returned from a previous List request, + if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListWorkstationClustersResponse(proto.Message): + r"""Response message for ListWorkstationClusters. + + Attributes: + workstation_clusters (MutableSequence[google.cloud.workstations_v1beta.types.WorkstationCluster]): + The requested workstation clusters. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Unreachable resources.
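A minimal sketch of the ``{port}-{host}`` convention described for the host field above; the workstation name is a placeholder.

from google.cloud import workstations_v1beta

client = workstations_v1beta.WorkstationsClient()
ws = client.get_workstation(
    name=(
        "projects/my-project/locations/us-central1/workstationClusters/"
        "my-cluster/workstationConfigs/my-config/workstations/my-workstation"
    )
)

if ws.state == workstations_v1beta.Workstation.State.STATE_RUNNING:
    base_url = f"https://{ws.host}/"      # HTTPS traffic arrives on port 80
    alt_url = f"https://8080-{ws.host}/"  # `{port}-{host}` targets port 8080
    print(base_url, alt_url)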
+ """ + + @property + def raw_page(self): + return self + + workstation_clusters: MutableSequence["WorkstationCluster"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="WorkstationCluster", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateWorkstationClusterRequest(proto.Message): + r"""Message for creating a CreateWorkstationCluster. + + Attributes: + parent (str): + Required. Parent resource name. + workstation_cluster_id (str): + Required. ID to use for the workstation + cluster. + workstation_cluster (google.cloud.workstations_v1beta.types.WorkstationCluster): + Required. Workstation cluster to create. + validate_only (bool): + If set, validate the request and preview the + review, but do not actually apply it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + workstation_cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + workstation_cluster: "WorkstationCluster" = proto.Field( + proto.MESSAGE, + number=3, + message="WorkstationCluster", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateWorkstationClusterRequest(proto.Message): + r"""Request message for UpdateWorkstationCluster. + + Attributes: + workstation_cluster (google.cloud.workstations_v1beta.types.WorkstationCluster): + Required. Workstation cluster to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask that specifies which fields in + the workstation cluster should be updated. + validate_only (bool): + If set, validate the request and preview the + review, but do not actually apply it. + allow_missing (bool): + If set, and the workstation cluster is not found, a new + workstation cluster will be created. In this situation, + update_mask is ignored. + """ + + workstation_cluster: "WorkstationCluster" = proto.Field( + proto.MESSAGE, + number=1, + message="WorkstationCluster", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteWorkstationClusterRequest(proto.Message): + r"""Message for deleting a workstation cluster. + + Attributes: + name (str): + Required. Name of the workstation cluster to + delete. + validate_only (bool): + If set, validate the request and preview the + review, but do not apply it. + etag (str): + If set, the request will be rejected if the + latest version of the workstation cluster on the + server does not have this ETag. + force (bool): + If set, any workstation configurations and + workstations in the workstation cluster are also + deleted. Otherwise, the request only works if + the workstation cluster has no configurations or + workstations. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + force: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetWorkstationConfigRequest(proto.Message): + r"""Request message for GetWorkstationConfig. + + Attributes: + name (str): + Required. Name of the requested resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListWorkstationConfigsRequest(proto.Message): + r"""Request message for ListWorkstationConfigs. 
+ + Attributes: + parent (str): + Required. Parent resource name. + page_size (int): + Maximum number of items to return. + page_token (str): + next_page_token value returned from a previous List request, + if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListWorkstationConfigsResponse(proto.Message): + r"""Response message for ListWorkstationConfigs. + + Attributes: + workstation_configs (MutableSequence[google.cloud.workstations_v1beta.types.WorkstationConfig]): + The requested configs. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Unreachable resources. + """ + + @property + def raw_page(self): + return self + + workstation_configs: MutableSequence["WorkstationConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="WorkstationConfig", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListUsableWorkstationConfigsRequest(proto.Message): + r"""Request message for ListUsableWorkstationConfigs. + + Attributes: + parent (str): + Required. Parent resource name. + page_size (int): + Maximum number of items to return. + page_token (str): + next_page_token value returned from a previous List request, + if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListUsableWorkstationConfigsResponse(proto.Message): + r"""Response message for ListUsableWorkstationConfigs. + + Attributes: + workstation_configs (MutableSequence[google.cloud.workstations_v1beta.types.WorkstationConfig]): + The requested configs. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Unreachable resources. + """ + + @property + def raw_page(self): + return self + + workstation_configs: MutableSequence["WorkstationConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="WorkstationConfig", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateWorkstationConfigRequest(proto.Message): + r"""Request message for CreateWorkstationConfig. + + Attributes: + parent (str): + Required. Parent resource name. + workstation_config_id (str): + Required. ID to use for the workstation + configuration. + workstation_config (google.cloud.workstations_v1beta.types.WorkstationConfig): + Required. Config to create. + validate_only (bool): + If set, validate the request and preview it, + but do not actually apply it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + workstation_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + workstation_config: "WorkstationConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="WorkstationConfig", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateWorkstationConfigRequest(proto.Message): + r"""Request message for UpdateWorkstationConfig.
+ + Attributes: + workstation_config (google.cloud.workstations_v1beta.types.WorkstationConfig): + Required. Config to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask specifying which fields in the + workstation configuration should be updated. + validate_only (bool): + If set, validate the request and preview it, + but do not actually apply it. + allow_missing (bool): + If set and the workstation configuration is not found, a new + workstation configuration will be created. In this + situation, update_mask is ignored. + """ + + workstation_config: "WorkstationConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="WorkstationConfig", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteWorkstationConfigRequest(proto.Message): + r"""Message for deleting a workstation configuration. + + Attributes: + name (str): + Required. Name of the workstation + configuration to delete. + validate_only (bool): + If set, validate the request and preview it, + but do not actually apply it. + etag (str): + If set, the request is rejected if the latest + version of the workstation configuration on the + server does not have this ETag. + force (bool): + If set, any workstations in the workstation + configuration are also deleted. Otherwise, the + request works only if the workstation + configuration has no workstations. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + force: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetWorkstationRequest(proto.Message): + r"""Request message for GetWorkstation. + + Attributes: + name (str): + Required. Name of the requested resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListWorkstationsRequest(proto.Message): + r"""Request message for ListWorkstations. + + Attributes: + parent (str): + Required. Parent resource name. + page_size (int): + Maximum number of items to return. + page_token (str): + next_page_token value returned from a previous List request, + if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListWorkstationsResponse(proto.Message): + r"""Response message for ListWorkstations. + + Attributes: + workstations (MutableSequence[google.cloud.workstations_v1beta.types.Workstation]): + The requested workstations. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Unreachable resources. + """ + + @property + def raw_page(self): + return self + + workstations: MutableSequence["Workstation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Workstation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListUsableWorkstationsRequest(proto.Message): + r"""Request message for ListUsableWorkstations. + + Attributes: + parent (str): + Required. Parent resource name.
+ page_size (int): + Maximum number of items to return. + page_token (str): + next_page_token value returned from a previous List request, + if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListUsableWorkstationsResponse(proto.Message): + r"""Response message for ListUsableWorkstations. + + Attributes: + workstations (MutableSequence[google.cloud.workstations_v1beta.types.Workstation]): + The requested workstations. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Unreachable resources. + """ + + @property + def raw_page(self): + return self + + workstations: MutableSequence["Workstation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Workstation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateWorkstationRequest(proto.Message): + r"""Request message for CreateWorkstation. + + Attributes: + parent (str): + Required. Parent resource name. + workstation_id (str): + Required. ID to use for the workstation. + workstation (google.cloud.workstations_v1beta.types.Workstation): + Required. Workstation to create. + validate_only (bool): + If set, validate the request and preview it, + but do not actually apply it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + workstation_id: str = proto.Field( + proto.STRING, + number=2, + ) + workstation: "Workstation" = proto.Field( + proto.MESSAGE, + number=3, + message="Workstation", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateWorkstationRequest(proto.Message): + r"""Request message for UpdateWorkstation. + + Attributes: + workstation (google.cloud.workstations_v1beta.types.Workstation): + Required. Workstation to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask specifying which fields in the + workstation should be updated. + validate_only (bool): + If set, validate the request and preview it, + but do not actually apply it. + allow_missing (bool): + If set and the workstation is not found, a new + workstation is created. In this situation, + update_mask is ignored. + """ + + workstation: "Workstation" = proto.Field( + proto.MESSAGE, + number=1, + message="Workstation", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteWorkstationRequest(proto.Message): + r"""Request message for DeleteWorkstation. + + Attributes: + name (str): + Required. Name of the workstation to delete. + validate_only (bool): + If set, validate the request and preview it, + but do not actually apply it. + etag (str): + If set, the request will be rejected if the + latest version of the workstation on the server + does not have this ETag.
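A minimal sketch of optimistic concurrency with the etag fields described above: read the server-computed checksum first, then send it with the mutation so the request is rejected if the workstation changed in the meantime. Resource names are placeholders, and the validate_only dry run is included purely for illustration.

from google.cloud import workstations_v1beta

client = workstations_v1beta.WorkstationsClient()
name = (
    "projects/my-project/locations/us-central1/workstationClusters/my-cluster"
    "/workstationConfigs/my-config/workstations/my-workstation"
)

ws = client.get_workstation(name=name)

request = workstations_v1beta.DeleteWorkstationRequest(
    name=name,
    etag=ws.etag,        # reject the delete if the workstation changed meanwhile
    validate_only=True,  # dry run: validate and preview, do not actually apply
)
operation = client.delete_workstation(request=request)
operation.result()  # raises a google.api_core exception on failure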
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StartWorkstationRequest(proto.Message): + r"""Request message for StartWorkstation. + + Attributes: + name (str): + Required. Name of the workstation to start. + validate_only (bool): + If set, validate the request and preview the + review, but do not actually apply it. + etag (str): + If set, the request will be rejected if the + latest version of the workstation on the server + does not have this ETag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StopWorkstationRequest(proto.Message): + r"""Request message for StopWorkstation. + + Attributes: + name (str): + Required. Name of the workstation to stop. + validate_only (bool): + If set, validate the request and preview the + review, but do not actually apply it. + etag (str): + If set, the request will be rejected if the + latest version of the workstation on the server + does not have this ETag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GenerateAccessTokenRequest(proto.Message): + r"""Request message for GenerateAccessToken. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + workstation (str): + Required. Name of the workstation for which + the access token should be generated. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Desired expiration time of the access token. + This value must be at most 24 hours in the + future. If a value is not specified, the token's + expiration time will be set to a default value + of 1 hour in the future. + + This field is a member of `oneof`_ ``expiration``. + ttl (google.protobuf.duration_pb2.Duration): + Desired lifetime duration of the access + token. This value must be at most 24 hours. If a + value is not specified, the token's lifetime + will be set to a default value of 1 hour. + + This field is a member of `oneof`_ ``expiration``. + """ + + workstation: str = proto.Field( + proto.STRING, + number=1, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof="expiration", + message=timestamp_pb2.Timestamp, + ) + ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + oneof="expiration", + message=duration_pb2.Duration, + ) + + +class GenerateAccessTokenResponse(proto.Message): + r"""Response message for GenerateAccessToken. + + Attributes: + access_token (str): + The generated bearer access token. To use this token, + include it in an Authorization header of an HTTP request + sent to the associated workstation's hostname—for example, + ``Authorization: Bearer ``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Time at which the generated token will + expire. 
+ """ + + access_token: str = proto.Field( + proto.STRING, + number=1, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class OperationMetadata(proto.Message): + r"""Metadata for long-running operations. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time that the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time that the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has + requested cancellation of the operation. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-workstations/mypy.ini b/packages/google-cloud-workstations/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-workstations/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-workstations/noxfile.py b/packages/google-cloud-workstations/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-cloud-workstations/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. 
+ + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-workstations/renovate.json b/packages/google-cloud-workstations/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-workstations/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json new file mode 100644 index 000000000000..3a77060922f2 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json @@ -0,0 +1,3307 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.workstations.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-workstations", + "version": "0.4.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.create_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.CreateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.CreateWorkstationClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1.types.WorkstationCluster" + }, + { + "name": "workstation_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "resultType":
"google.api_core.operation_async.AsyncOperation", + "shortName": "create_workstation_cluster" + }, + "description": "Sample for CreateWorkstationCluster", + "file": "workstations_v1_generated_workstations_create_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_CreateWorkstationCluster_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_create_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.create_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.CreateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.CreateWorkstationClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1.types.WorkstationCluster" + }, + { + "name": "workstation_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_workstation_cluster" + }, + "description": "Sample for CreateWorkstationCluster", + "file": "workstations_v1_generated_workstations_create_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_CreateWorkstationCluster_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_create_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.create_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.CreateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.CreateWorkstationConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"workstation_config", + "type": "google.cloud.workstations_v1.types.WorkstationConfig" + }, + { + "name": "workstation_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_workstation_config" + }, + "description": "Sample for CreateWorkstationConfig", + "file": "workstations_v1_generated_workstations_create_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_CreateWorkstationConfig_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_create_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.create_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.CreateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.CreateWorkstationConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation_config", + "type": "google.cloud.workstations_v1.types.WorkstationConfig" + }, + { + "name": "workstation_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_workstation_config" + }, + "description": "Sample for CreateWorkstationConfig", + "file": "workstations_v1_generated_workstations_create_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_CreateWorkstationConfig_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_create_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.create_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.CreateWorkstation", + "service": { + "fullName": 
"google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.CreateWorkstationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1.types.Workstation" + }, + { + "name": "workstation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_workstation" + }, + "description": "Sample for CreateWorkstation", + "file": "workstations_v1_generated_workstations_create_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_CreateWorkstation_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_create_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.create_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.CreateWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.CreateWorkstationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1.types.Workstation" + }, + { + "name": "workstation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_workstation" + }, + "description": "Sample for CreateWorkstation", + "file": "workstations_v1_generated_workstations_create_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_CreateWorkstation_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_create_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": 
"google.cloud.workstations_v1.WorkstationsAsyncClient.delete_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.DeleteWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.DeleteWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_workstation_cluster" + }, + "description": "Sample for DeleteWorkstationCluster", + "file": "workstations_v1_generated_workstations_delete_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_DeleteWorkstationCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_delete_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.delete_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.DeleteWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.DeleteWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_workstation_cluster" + }, + "description": "Sample for DeleteWorkstationCluster", + "file": "workstations_v1_generated_workstations_delete_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_DeleteWorkstationCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_delete_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + 
"fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.delete_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.DeleteWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.DeleteWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_workstation_config" + }, + "description": "Sample for DeleteWorkstationConfig", + "file": "workstations_v1_generated_workstations_delete_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_DeleteWorkstationConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_delete_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.delete_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.DeleteWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.DeleteWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_workstation_config" + }, + "description": "Sample for DeleteWorkstationConfig", + "file": "workstations_v1_generated_workstations_delete_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_DeleteWorkstationConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_delete_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": 
"google.cloud.workstations_v1.WorkstationsAsyncClient.delete_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.DeleteWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.DeleteWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_workstation" + }, + "description": "Sample for DeleteWorkstation", + "file": "workstations_v1_generated_workstations_delete_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_DeleteWorkstation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_delete_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.delete_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.DeleteWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.DeleteWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_workstation" + }, + "description": "Sample for DeleteWorkstation", + "file": "workstations_v1_generated_workstations_delete_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_DeleteWorkstation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_delete_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.generate_access_token", + "method": { + "fullName": 
"google.cloud.workstations.v1.Workstations.GenerateAccessToken", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GenerateAccessToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GenerateAccessTokenRequest" + }, + { + "name": "workstation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.GenerateAccessTokenResponse", + "shortName": "generate_access_token" + }, + "description": "Sample for GenerateAccessToken", + "file": "workstations_v1_generated_workstations_generate_access_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GenerateAccessToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_generate_access_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.generate_access_token", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.GenerateAccessToken", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GenerateAccessToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GenerateAccessTokenRequest" + }, + { + "name": "workstation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.GenerateAccessTokenResponse", + "shortName": "generate_access_token" + }, + "description": "Sample for GenerateAccessToken", + "file": "workstations_v1_generated_workstations_generate_access_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GenerateAccessToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_generate_access_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.get_workstation_cluster", + "method": { + "fullName": 
"google.cloud.workstations.v1.Workstations.GetWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GetWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.WorkstationCluster", + "shortName": "get_workstation_cluster" + }, + "description": "Sample for GetWorkstationCluster", + "file": "workstations_v1_generated_workstations_get_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GetWorkstationCluster_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_get_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.get_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.GetWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GetWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.WorkstationCluster", + "shortName": "get_workstation_cluster" + }, + "description": "Sample for GetWorkstationCluster", + "file": "workstations_v1_generated_workstations_get_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GetWorkstationCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_get_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.get_workstation_config", + "method": { + "fullName": 
"google.cloud.workstations.v1.Workstations.GetWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GetWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.WorkstationConfig", + "shortName": "get_workstation_config" + }, + "description": "Sample for GetWorkstationConfig", + "file": "workstations_v1_generated_workstations_get_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GetWorkstationConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_get_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.get_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.GetWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GetWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.WorkstationConfig", + "shortName": "get_workstation_config" + }, + "description": "Sample for GetWorkstationConfig", + "file": "workstations_v1_generated_workstations_get_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GetWorkstationConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_get_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.get_workstation", + "method": { + "fullName": 
"google.cloud.workstations.v1.Workstations.GetWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GetWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.Workstation", + "shortName": "get_workstation" + }, + "description": "Sample for GetWorkstation", + "file": "workstations_v1_generated_workstations_get_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GetWorkstation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_get_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.get_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.GetWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.GetWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.types.Workstation", + "shortName": "get_workstation" + }, + "description": "Sample for GetWorkstation", + "file": "workstations_v1_generated_workstations_get_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_GetWorkstation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_get_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.list_usable_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListUsableWorkstationConfigs", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + 
"shortName": "Workstations" + }, + "shortName": "ListUsableWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListUsableWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationConfigsAsyncPager", + "shortName": "list_usable_workstation_configs" + }, + "description": "Sample for ListUsableWorkstationConfigs", + "file": "workstations_v1_generated_workstations_list_usable_workstation_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListUsableWorkstationConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_usable_workstation_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.list_usable_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListUsableWorkstationConfigs", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListUsableWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListUsableWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationConfigsPager", + "shortName": "list_usable_workstation_configs" + }, + "description": "Sample for ListUsableWorkstationConfigs", + "file": "workstations_v1_generated_workstations_list_usable_workstation_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListUsableWorkstationConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_usable_workstation_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": 
"google.cloud.workstations_v1.WorkstationsAsyncClient.list_usable_workstations", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListUsableWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListUsableWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListUsableWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationsAsyncPager", + "shortName": "list_usable_workstations" + }, + "description": "Sample for ListUsableWorkstations", + "file": "workstations_v1_generated_workstations_list_usable_workstations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListUsableWorkstations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_usable_workstations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.list_usable_workstations", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListUsableWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListUsableWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListUsableWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListUsableWorkstationsPager", + "shortName": "list_usable_workstations" + }, + "description": "Sample for ListUsableWorkstations", + "file": "workstations_v1_generated_workstations_list_usable_workstations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListUsableWorkstations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_usable_workstations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.list_workstation_clusters", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListWorkstationClusters", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListWorkstationClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationClustersAsyncPager", + "shortName": "list_workstation_clusters" + }, + "description": "Sample for ListWorkstationClusters", + "file": "workstations_v1_generated_workstations_list_workstation_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListWorkstationClusters_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_workstation_clusters_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.list_workstation_clusters", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListWorkstationClusters", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListWorkstationClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationClustersPager", + "shortName": "list_workstation_clusters" + }, + "description": "Sample for ListWorkstationClusters", + "file": "workstations_v1_generated_workstations_list_workstation_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListWorkstationClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_workstation_clusters_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.list_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListWorkstationConfigs", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationConfigsAsyncPager", + "shortName": "list_workstation_configs" + }, + "description": "Sample for ListWorkstationConfigs", + "file": "workstations_v1_generated_workstations_list_workstation_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListWorkstationConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_workstation_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.list_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListWorkstationConfigs", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationConfigsPager", + "shortName": "list_workstation_configs" + }, + "description": "Sample for ListWorkstationConfigs", + "file": "workstations_v1_generated_workstations_list_workstation_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListWorkstationConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"workstations_v1_generated_workstations_list_workstation_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.list_workstations", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationsAsyncPager", + "shortName": "list_workstations" + }, + "description": "Sample for ListWorkstations", + "file": "workstations_v1_generated_workstations_list_workstations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListWorkstations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_list_workstations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.list_workstations", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.ListWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.ListWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1.services.workstations.pagers.ListWorkstationsPager", + "shortName": "list_workstations" + }, + "description": "Sample for ListWorkstations", + "file": "workstations_v1_generated_workstations_list_workstations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_ListWorkstations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"workstations_v1_generated_workstations_list_workstations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.start_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.StartWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "StartWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.StartWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "start_workstation" + }, + "description": "Sample for StartWorkstation", + "file": "workstations_v1_generated_workstations_start_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_StartWorkstation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_start_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.start_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.StartWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "StartWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.StartWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "start_workstation" + }, + "description": "Sample for StartWorkstation", + "file": "workstations_v1_generated_workstations_start_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_StartWorkstation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_start_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.stop_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.StopWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "StopWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.StopWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "stop_workstation" + }, + "description": "Sample for StopWorkstation", + "file": "workstations_v1_generated_workstations_stop_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_StopWorkstation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_stop_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.stop_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.StopWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "StopWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.StopWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "stop_workstation" + }, + "description": "Sample for StopWorkstation", + "file": "workstations_v1_generated_workstations_stop_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_StopWorkstation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_stop_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": 
"google.cloud.workstations_v1.WorkstationsAsyncClient.update_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.UpdateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.UpdateWorkstationClusterRequest" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1.types.WorkstationCluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_workstation_cluster" + }, + "description": "Sample for UpdateWorkstationCluster", + "file": "workstations_v1_generated_workstations_update_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_UpdateWorkstationCluster_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_update_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.update_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.UpdateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.UpdateWorkstationClusterRequest" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1.types.WorkstationCluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_workstation_cluster" + }, + "description": "Sample for UpdateWorkstationCluster", + "file": "workstations_v1_generated_workstations_update_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_UpdateWorkstationCluster_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + 
} + ], + "title": "workstations_v1_generated_workstations_update_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.update_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.UpdateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.UpdateWorkstationConfigRequest" + }, + { + "name": "workstation_config", + "type": "google.cloud.workstations_v1.types.WorkstationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_workstation_config" + }, + "description": "Sample for UpdateWorkstationConfig", + "file": "workstations_v1_generated_workstations_update_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_UpdateWorkstationConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_update_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.update_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.UpdateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.UpdateWorkstationConfigRequest" + }, + { + "name": "workstation_config", + "type": "google.cloud.workstations_v1.types.WorkstationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_workstation_config" + }, + "description": "Sample for UpdateWorkstationConfig", + "file": "workstations_v1_generated_workstations_update_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_UpdateWorkstationConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_update_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsAsyncClient.update_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.UpdateWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.UpdateWorkstationRequest" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1.types.Workstation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_workstation" + }, + "description": "Sample for UpdateWorkstation", + "file": "workstations_v1_generated_workstations_update_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1_generated_Workstations_UpdateWorkstation_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_update_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1.WorkstationsClient.update_workstation", + "method": { + "fullName": "google.cloud.workstations.v1.Workstations.UpdateWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1.types.UpdateWorkstationRequest" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1.types.Workstation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_workstation" + }, + "description": "Sample for UpdateWorkstation", + "file": "workstations_v1_generated_workstations_update_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"workstations_v1_generated_Workstations_UpdateWorkstation_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1_generated_workstations_update_workstation_sync.py" + } + ] +} diff --git a/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json new file mode 100644 index 000000000000..4089c6861b7d --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json @@ -0,0 +1,3307 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.workstations.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-cloud-workstations", + "version": "0.4.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.create_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.CreateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.CreateWorkstationClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1beta.types.WorkstationCluster" + }, + { + "name": "workstation_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_workstation_cluster" + }, + "description": "Sample for CreateWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_create_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_CreateWorkstationCluster_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_create_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.create_workstation_cluster", + "method": { + "fullName": 
"google.cloud.workstations.v1beta.Workstations.CreateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.CreateWorkstationClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1beta.types.WorkstationCluster" + }, + { + "name": "workstation_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_workstation_cluster" + }, + "description": "Sample for CreateWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_create_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_CreateWorkstationCluster_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_create_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.create_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.CreateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.CreateWorkstationConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation_config", + "type": "google.cloud.workstations_v1beta.types.WorkstationConfig" + }, + { + "name": "workstation_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_workstation_config" + }, + "description": "Sample for CreateWorkstationConfig", + "file": "workstations_v1beta_generated_workstations_create_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_CreateWorkstationConfig_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_create_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.create_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.CreateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.CreateWorkstationConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation_config", + "type": "google.cloud.workstations_v1beta.types.WorkstationConfig" + }, + { + "name": "workstation_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_workstation_config" + }, + "description": "Sample for CreateWorkstationConfig", + "file": "workstations_v1beta_generated_workstations_create_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_CreateWorkstationConfig_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_create_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.create_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.CreateWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.CreateWorkstationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1beta.types.Workstation" + }, + { + "name": "workstation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_workstation" + }, + "description": "Sample for CreateWorkstation", + "file": "workstations_v1beta_generated_workstations_create_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_CreateWorkstation_async", + "segments": [ + { + "end": 56, + "start": 27, + 
"type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_create_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.create_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.CreateWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "CreateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.CreateWorkstationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1beta.types.Workstation" + }, + { + "name": "workstation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_workstation" + }, + "description": "Sample for CreateWorkstation", + "file": "workstations_v1beta_generated_workstations_create_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_CreateWorkstation_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_create_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.delete_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.DeleteWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.DeleteWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_workstation_cluster" + }, + "description": "Sample for DeleteWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_delete_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", 
+ "regionTag": "workstations_v1beta_generated_Workstations_DeleteWorkstationCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_delete_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.delete_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.DeleteWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.DeleteWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_workstation_cluster" + }, + "description": "Sample for DeleteWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_delete_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_DeleteWorkstationCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_delete_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.delete_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.DeleteWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.DeleteWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_workstation_config" + }, + "description": "Sample for DeleteWorkstationConfig", + "file": 
"workstations_v1beta_generated_workstations_delete_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_DeleteWorkstationConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_delete_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.delete_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.DeleteWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.DeleteWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_workstation_config" + }, + "description": "Sample for DeleteWorkstationConfig", + "file": "workstations_v1beta_generated_workstations_delete_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_DeleteWorkstationConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_delete_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.delete_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.DeleteWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.DeleteWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_workstation" + }, + "description": "Sample for DeleteWorkstation", 
+ "file": "workstations_v1beta_generated_workstations_delete_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_DeleteWorkstation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_delete_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.delete_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.DeleteWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "DeleteWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.DeleteWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_workstation" + }, + "description": "Sample for DeleteWorkstation", + "file": "workstations_v1beta_generated_workstations_delete_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_DeleteWorkstation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_delete_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.generate_access_token", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GenerateAccessToken", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GenerateAccessToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GenerateAccessTokenRequest" + }, + { + "name": "workstation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.GenerateAccessTokenResponse", + "shortName": "generate_access_token" + }, + "description": "Sample for GenerateAccessToken", + "file": 
"workstations_v1beta_generated_workstations_generate_access_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GenerateAccessToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_generate_access_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.generate_access_token", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GenerateAccessToken", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GenerateAccessToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GenerateAccessTokenRequest" + }, + { + "name": "workstation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.GenerateAccessTokenResponse", + "shortName": "generate_access_token" + }, + "description": "Sample for GenerateAccessToken", + "file": "workstations_v1beta_generated_workstations_generate_access_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GenerateAccessToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_generate_access_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.get_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GetWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GetWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.WorkstationCluster", + "shortName": "get_workstation_cluster" + }, + "description": "Sample 
for GetWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_get_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GetWorkstationCluster_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_get_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.get_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GetWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GetWorkstationClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.WorkstationCluster", + "shortName": "get_workstation_cluster" + }, + "description": "Sample for GetWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_get_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GetWorkstationCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_get_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.get_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GetWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GetWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.WorkstationConfig", + "shortName": 
"get_workstation_config" + }, + "description": "Sample for GetWorkstationConfig", + "file": "workstations_v1beta_generated_workstations_get_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GetWorkstationConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_get_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.get_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GetWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GetWorkstationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.WorkstationConfig", + "shortName": "get_workstation_config" + }, + "description": "Sample for GetWorkstationConfig", + "file": "workstations_v1beta_generated_workstations_get_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GetWorkstationConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_get_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.get_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GetWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GetWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.Workstation", + "shortName": 
"get_workstation" + }, + "description": "Sample for GetWorkstation", + "file": "workstations_v1beta_generated_workstations_get_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GetWorkstation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_get_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.get_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.GetWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "GetWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.GetWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.types.Workstation", + "shortName": "get_workstation" + }, + "description": "Sample for GetWorkstation", + "file": "workstations_v1beta_generated_workstations_get_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_GetWorkstation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_get_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.list_usable_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListUsableWorkstationConfigs", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListUsableWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationConfigsAsyncPager", + 
"shortName": "list_usable_workstation_configs" + }, + "description": "Sample for ListUsableWorkstationConfigs", + "file": "workstations_v1beta_generated_workstations_list_usable_workstation_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListUsableWorkstationConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_usable_workstation_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.list_usable_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListUsableWorkstationConfigs", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListUsableWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListUsableWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationConfigsPager", + "shortName": "list_usable_workstation_configs" + }, + "description": "Sample for ListUsableWorkstationConfigs", + "file": "workstations_v1beta_generated_workstations_list_usable_workstation_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListUsableWorkstationConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_usable_workstation_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.list_usable_workstations", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListUsableWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListUsableWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListUsableWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationsAsyncPager", + "shortName": "list_usable_workstations" + }, + "description": "Sample for ListUsableWorkstations", + "file": "workstations_v1beta_generated_workstations_list_usable_workstations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListUsableWorkstations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_usable_workstations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.list_usable_workstations", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListUsableWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListUsableWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListUsableWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListUsableWorkstationsPager", + "shortName": "list_usable_workstations" + }, + "description": "Sample for ListUsableWorkstations", + "file": "workstations_v1beta_generated_workstations_list_usable_workstations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListUsableWorkstations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_usable_workstations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.list_workstation_clusters", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListWorkstationClusters", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationClusters" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.workstations_v1beta.types.ListWorkstationClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationClustersAsyncPager", + "shortName": "list_workstation_clusters" + }, + "description": "Sample for ListWorkstationClusters", + "file": "workstations_v1beta_generated_workstations_list_workstation_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListWorkstationClusters_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_workstation_clusters_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.list_workstation_clusters", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListWorkstationClusters", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListWorkstationClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationClustersPager", + "shortName": "list_workstation_clusters" + }, + "description": "Sample for ListWorkstationClusters", + "file": "workstations_v1beta_generated_workstations_list_workstation_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListWorkstationClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_workstation_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.list_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListWorkstationConfigs", + "service": { + "fullName": 
"google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationConfigsAsyncPager", + "shortName": "list_workstation_configs" + }, + "description": "Sample for ListWorkstationConfigs", + "file": "workstations_v1beta_generated_workstations_list_workstation_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListWorkstationConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_workstation_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.list_workstation_configs", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListWorkstationConfigs", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListWorkstationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationConfigsPager", + "shortName": "list_workstation_configs" + }, + "description": "Sample for ListWorkstationConfigs", + "file": "workstations_v1beta_generated_workstations_list_workstation_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListWorkstationConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_workstation_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.list_workstations", 
+ "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationsAsyncPager", + "shortName": "list_workstations" + }, + "description": "Sample for ListWorkstations", + "file": "workstations_v1beta_generated_workstations_list_workstations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListWorkstations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_workstations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.list_workstations", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.ListWorkstations", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "ListWorkstations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.ListWorkstationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.workstations_v1beta.services.workstations.pagers.ListWorkstationsPager", + "shortName": "list_workstations" + }, + "description": "Sample for ListWorkstations", + "file": "workstations_v1beta_generated_workstations_list_workstations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_ListWorkstations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_list_workstations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": 
"google.cloud.workstations_v1beta.WorkstationsAsyncClient.start_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.StartWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "StartWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.StartWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "start_workstation" + }, + "description": "Sample for StartWorkstation", + "file": "workstations_v1beta_generated_workstations_start_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_StartWorkstation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_start_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.start_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.StartWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "StartWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.StartWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "start_workstation" + }, + "description": "Sample for StartWorkstation", + "file": "workstations_v1beta_generated_workstations_start_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_StartWorkstation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_start_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.stop_workstation", + 
"method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.StopWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "StopWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.StopWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "stop_workstation" + }, + "description": "Sample for StopWorkstation", + "file": "workstations_v1beta_generated_workstations_stop_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_StopWorkstation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_stop_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.stop_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.StopWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "StopWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.StopWorkstationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "stop_workstation" + }, + "description": "Sample for StopWorkstation", + "file": "workstations_v1beta_generated_workstations_stop_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_StopWorkstation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_stop_workstation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.update_workstation_cluster", + "method": { + "fullName": 
"google.cloud.workstations.v1beta.Workstations.UpdateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.UpdateWorkstationClusterRequest" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1beta.types.WorkstationCluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_workstation_cluster" + }, + "description": "Sample for UpdateWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_update_workstation_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_UpdateWorkstationCluster_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_update_workstation_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.update_workstation_cluster", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.UpdateWorkstationCluster", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.UpdateWorkstationClusterRequest" + }, + { + "name": "workstation_cluster", + "type": "google.cloud.workstations_v1beta.types.WorkstationCluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_workstation_cluster" + }, + "description": "Sample for UpdateWorkstationCluster", + "file": "workstations_v1beta_generated_workstations_update_workstation_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_UpdateWorkstationCluster_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"workstations_v1beta_generated_workstations_update_workstation_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.update_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.UpdateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.UpdateWorkstationConfigRequest" + }, + { + "name": "workstation_config", + "type": "google.cloud.workstations_v1beta.types.WorkstationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_workstation_config" + }, + "description": "Sample for UpdateWorkstationConfig", + "file": "workstations_v1beta_generated_workstations_update_workstation_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_UpdateWorkstationConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_update_workstation_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.update_workstation_config", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.UpdateWorkstationConfig", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.UpdateWorkstationConfigRequest" + }, + { + "name": "workstation_config", + "type": "google.cloud.workstations_v1beta.types.WorkstationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_workstation_config" + }, + "description": "Sample for UpdateWorkstationConfig", + "file": "workstations_v1beta_generated_workstations_update_workstation_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_UpdateWorkstationConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": 
"FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_update_workstation_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient", + "shortName": "WorkstationsAsyncClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsAsyncClient.update_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.UpdateWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.UpdateWorkstationRequest" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1beta.types.Workstation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_workstation" + }, + "description": "Sample for UpdateWorkstation", + "file": "workstations_v1beta_generated_workstations_update_workstation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_UpdateWorkstation_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_update_workstation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient", + "shortName": "WorkstationsClient" + }, + "fullName": "google.cloud.workstations_v1beta.WorkstationsClient.update_workstation", + "method": { + "fullName": "google.cloud.workstations.v1beta.Workstations.UpdateWorkstation", + "service": { + "fullName": "google.cloud.workstations.v1beta.Workstations", + "shortName": "Workstations" + }, + "shortName": "UpdateWorkstation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.workstations_v1beta.types.UpdateWorkstationRequest" + }, + { + "name": "workstation", + "type": "google.cloud.workstations_v1beta.types.Workstation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_workstation" + }, + "description": "Sample for UpdateWorkstation", + "file": 
"workstations_v1beta_generated_workstations_update_workstation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "workstations_v1beta_generated_Workstations_UpdateWorkstation_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "workstations_v1beta_generated_workstations_update_workstation_sync.py" + } + ] +} diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_async.py new file mode 100644 index 000000000000..2b98f5868e98 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_CreateWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_create_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationRequest( + parent="parent_value", + workstation_id="workstation_id_value", + ) + + # Make the request + operation = client.create_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_CreateWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_cluster_async.py new file mode 100644 index 000000000000..2c79f9767b1f --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_cluster_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_CreateWorkstationCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
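
The `Update*` entries in the metadata above each take a resource message plus a `google.protobuf.field_mask_pb2.FieldMask`. A hedged sketch of a masked update, assuming `display_name` as the field to change (the flattened `workstation` and `update_mask` keywords mirror the parameters listed in the metadata):

```python
from google.cloud import workstations_v1beta
from google.protobuf import field_mask_pb2


def rename_workstation(name: str, new_display_name: str) -> None:
    client = workstations_v1beta.WorkstationsClient()
    workstation = workstations_v1beta.Workstation(
        name=name,
        display_name=new_display_name,  # assumed field for illustration
    )
    # Only the paths named in update_mask are written; other fields on
    # the resource message are ignored by the service.
    operation = client.update_workstation(
        workstation=workstation,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    print(operation.result())
```
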
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_CreateWorkstationCluster_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_cluster_sync.py new file mode 100644 index 000000000000..1b52bb03c3d2 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_cluster_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_CreateWorkstationCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_CreateWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_config_async.py new file mode 100644 index 000000000000..34dfde2cb6e2 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_CreateWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
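
The create samples above wait with a bare `operation.result()`. For readers wondering what else the returned `google.api_core.operation.Operation` exposes, a rough sketch under the same placeholder values (the 900-second deadline is arbitrary):

```python
from google.cloud import workstations_v1


def create_cluster_with_deadline(parent: str, cluster_id: str) -> None:
    client = workstations_v1.WorkstationsClient()
    operation = client.create_workstation_cluster(
        request=workstations_v1.CreateWorkstationClusterRequest(
            parent=parent,
            workstation_cluster_id=cluster_id,
        )
    )
    print("done yet?", operation.done())    # single poll, non-blocking
    print("progress:", operation.metadata)  # service-reported metadata, if any
    # result() blocks until completion; it raises the mapped API error on
    # failure and concurrent.futures.TimeoutError past the deadline.
    print("response:", operation.result(timeout=900))
```
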
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_create_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationConfigRequest( + parent="parent_value", + workstation_config_id="workstation_config_id_value", + ) + + # Make the request + operation = client.create_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_CreateWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_config_sync.py new file mode 100644 index 000000000000..22c056da3f67 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_config_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_CreateWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_create_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationConfigRequest( + parent="parent_value", + workstation_config_id="workstation_config_id_value", + ) + + # Make the request + operation = client.create_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_CreateWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_sync.py new file mode 100644 index 000000000000..700a493b6603 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_create_workstation_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_CreateWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
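
Every sample builds the request message explicitly, but the generated clients also accept the flattened fields recorded under `parameters` in the snippet metadata. A sketch of the equivalent flattened call; the config id is a placeholder:

```python
from google.cloud import workstations_v1


def create_config_flattened(parent: str) -> None:
    client = workstations_v1.WorkstationsClient()
    # Equivalent to building CreateWorkstationConfigRequest by hand.
    # Passing request= together with flattened keywords raises ValueError;
    # pick one style per call.
    operation = client.create_workstation_config(
        parent=parent,
        workstation_config=workstations_v1.WorkstationConfig(),
        workstation_config_id="my-config",  # placeholder id
    )
    print(operation.result())
```
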
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_create_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.CreateWorkstationRequest( + parent="parent_value", + workstation_id="workstation_id_value", + ) + + # Make the request + operation = client.create_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_CreateWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_async.py new file mode 100644 index 000000000000..5514865fc2f6 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_DeleteWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_delete_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_DeleteWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_cluster_async.py new file mode 100644 index 000000000000..28d180f0997b --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_DeleteWorkstationCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
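
Each method's metadata lists optional `retry`, `timeout`, and `metadata` parameters alongside the request. A hedged sketch of overriding them on a single call, with made-up backoff numbers and a demo header:

```python
from google.api_core import retry as retries
from google.cloud import workstations_v1


def get_workstation_with_overrides(name: str) -> None:
    client = workstations_v1.WorkstationsClient()
    workstation = client.get_workstation(
        name=name,
        # Exponential backoff: 1s first delay, doubling, capped at 16s.
        retry=retries.Retry(initial=1.0, maximum=16.0, multiplier=2.0),
        timeout=30.0,  # client-side deadline for the call, in seconds
        metadata=[("x-goog-custom-header", "demo")],  # extra request metadata
    )
    print(workstation.name)
```
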
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_DeleteWorkstationCluster_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_cluster_sync.py new file mode 100644 index 000000000000..b6e485603f30 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_DeleteWorkstationCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_DeleteWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_config_async.py new file mode 100644 index 000000000000..8470b58e85b7 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_config_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_DeleteWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
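
The generated headers repeatedly note that a regional endpoint may be needed, pointing at the `client_options` documentation. A sketch of that override; the regional hostname below is invented for illustration, not a documented Workstations endpoint:

```python
from google.api_core.client_options import ClientOptions
from google.cloud import workstations_v1

# api_endpoint replaces the default workstations.googleapis.com host.
client = workstations_v1.WorkstationsClient(
    client_options=ClientOptions(
        api_endpoint="us-central1-workstations.googleapis.com"  # hypothetical host
    )
)
```
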
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_delete_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_DeleteWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_config_sync.py new file mode 100644 index 000000000000..997a1a6374c5 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_config_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_DeleteWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_delete_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_DeleteWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_sync.py new file mode 100644 index 000000000000..93bef704d960 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_delete_workstation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_DeleteWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_delete_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.DeleteWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_DeleteWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_generate_access_token_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_generate_access_token_async.py new file mode 100644 index 000000000000..321df3cbf79c --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_generate_access_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAccessToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GenerateAccessToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
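
The delete samples assume the target resource exists. Since api_core maps gRPC `NOT_FOUND` onto `google.api_core.exceptions.NotFound`, a tolerant delete might look like this sketch:

```python
from google.api_core import exceptions
from google.cloud import workstations_v1


def delete_if_exists(name: str) -> None:
    client = workstations_v1.WorkstationsClient()
    try:
        client.delete_workstation(name=name).result()
        print("deleted", name)
    except exceptions.NotFound:
        # Raised when the workstation (or a parent resource) does not exist.
        print("nothing to delete:", name)
```
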
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_generate_access_token(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = await client.generate_access_token(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GenerateAccessToken_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_generate_access_token_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_generate_access_token_sync.py new file mode 100644 index 000000000000..a72693bef82d --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_generate_access_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAccessToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GenerateAccessToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
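+# - Where a regional endpoint is needed, it can be supplied at construction
+# time; a minimal sketch (the endpoint value here is hypothetical):
+#     from google.api_core.client_options import ClientOptions
+#     client = workstations_v1.WorkstationsClient(
+#         client_options=ClientOptions(api_endpoint="us-central1-workstations.googleapis.com")
+#     )
+# See the next point for the client_options documentation.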
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_generate_access_token(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = client.generate_access_token(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GenerateAccessToken_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_async.py new file mode 100644 index 000000000000..ee695ab5630e --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GetWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_get_workstation(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GetWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_cluster_async.py new file mode 100644 index 000000000000..5e025bfe3152 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_cluster_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GetWorkstationCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GetWorkstationCluster_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_cluster_sync.py new file mode 100644 index 000000000000..43637c368e66 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GetWorkstationCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GetWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_config_async.py new file mode 100644 index 000000000000..90d8ea0be768 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GetWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_get_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_config(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GetWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_config_sync.py new file mode 100644 index 000000000000..8b54b8dca2cd --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GetWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_get_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_config(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GetWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_sync.py new file mode 100644 index 000000000000..c7a9bbfbd54e --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_get_workstation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_GetWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_get_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation(request=request) + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_GetWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstation_configs_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstation_configs_async.py new file mode 100644 index 000000000000..3143e4429e06 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstation_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsableWorkstationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_ListUsableWorkstationConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
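+# - With the async client, the list_* call is a coroutine: await it to obtain
+# the pager, then consume items with `async for` (as in the body below); the
+# pager also exposes `.pages` for page-level iteration.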
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_list_usable_workstation_configs():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.ListUsableWorkstationConfigsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_usable_workstation_configs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END workstations_v1_generated_Workstations_ListUsableWorkstationConfigs_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstation_configs_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstation_configs_sync.py
new file mode 100644
index 000000000000..fb925cf446d0
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstation_configs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListUsableWorkstationConfigs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_ListUsableWorkstationConfigs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
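+# - The sync pager returned below is a plain iterable that yields individual
+# results; whole response pages are available via `page_result.pages`, e.g.
+# `for page in page_result.pages: ...` (a usage sketch).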
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_list_usable_workstation_configs(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListUsableWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1_generated_Workstations_ListUsableWorkstationConfigs_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstations_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstations_async.py new file mode 100644 index 000000000000..40252d403768 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsableWorkstations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_ListUsableWorkstations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_list_usable_workstations():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.ListUsableWorkstationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_usable_workstations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END workstations_v1_generated_Workstations_ListUsableWorkstations_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstations_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstations_sync.py
new file mode 100644
index 000000000000..a8baef12b697
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_usable_workstations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListUsableWorkstations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_ListUsableWorkstations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_list_usable_workstations(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListUsableWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1_generated_Workstations_ListUsableWorkstations_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_clusters_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_clusters_async.py new file mode 100644 index 000000000000..4f8e12c501a2 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_clusters_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstationClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_ListWorkstationClusters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_list_workstation_clusters():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.ListWorkstationClustersRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_workstation_clusters(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END workstations_v1_generated_Workstations_ListWorkstationClusters_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_clusters_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_clusters_sync.py
new file mode 100644
index 000000000000..57e1a0c79255
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_clusters_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListWorkstationClusters
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_ListWorkstationClusters_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_list_workstation_clusters(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1_generated_Workstations_ListWorkstationClusters_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_configs_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_configs_async.py new file mode 100644 index 000000000000..6e47c24da809 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_ListWorkstationConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_list_workstation_configs():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.ListWorkstationConfigsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_workstation_configs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END workstations_v1_generated_Workstations_ListWorkstationConfigs_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_configs_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_configs_sync.py
new file mode 100644
index 000000000000..2054d94a69f8
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstation_configs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListWorkstationConfigs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_ListWorkstationConfigs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_list_workstation_configs(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1_generated_Workstations_ListWorkstationConfigs_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstations_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstations_async.py new file mode 100644 index 000000000000..e18ec15c52ca --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_ListWorkstations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_list_workstations():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.ListWorkstationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_workstations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END workstations_v1_generated_Workstations_ListWorkstations_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstations_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstations_sync.py
new file mode 100644
index 000000000000..db3c784b9a88
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_list_workstations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListWorkstations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_ListWorkstations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_list_workstations(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.ListWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1_generated_Workstations_ListWorkstations_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_start_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_start_workstation_async.py new file mode 100644 index 000000000000..9423ed1ead91 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_start_workstation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_StartWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
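+# - With the async client, long-running calls take two awaits: `await` the
+# RPC to obtain the operation object, then `await operation.result()` for the
+# final response (as in the body below).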
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_start_workstation():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.StartWorkstationRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.start_workstation(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END workstations_v1_generated_Workstations_StartWorkstation_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_start_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_start_workstation_sync.py
new file mode 100644
index 000000000000..65b40034d944
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_start_workstation_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for StartWorkstation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_StartWorkstation_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
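+# - `operation.result()` blocks until the long-running operation finishes and
+# re-raises its error on failure; it also accepts an optional timeout in
+# seconds, e.g. `operation.result(timeout=300)` (a usage sketch).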
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_start_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.StartWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.start_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_StartWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_stop_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_stop_workstation_async.py new file mode 100644 index 000000000000..f15f6c98bbb2 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_stop_workstation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_StopWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_stop_workstation():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.StopWorkstationRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.stop_workstation(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END workstations_v1_generated_Workstations_StopWorkstation_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_stop_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_stop_workstation_sync.py
new file mode 100644
index 000000000000..5555b7bee7f5
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_stop_workstation_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for StopWorkstation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_StopWorkstation_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_stop_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.StopWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.stop_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_StopWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_async.py new file mode 100644 index 000000000000..3ae862b4b98e --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_UpdateWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
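+# - The Update*Request below is initialized empty for brevity; a real call
+# would typically set the resource being updated (e.g. a `workstation` field)
+# and, optionally, an `update_mask` limiting which fields change (field names
+# are a sketch based on the request message, not generated output).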
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_update_workstation():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.UpdateWorkstationRequest(
+    )
+
+    # Make the request
+    operation = await client.update_workstation(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END workstations_v1_generated_Workstations_UpdateWorkstation_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_cluster_async.py
new file mode 100644
index 000000000000..0062ff2868b9
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_cluster_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateWorkstationCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_UpdateWorkstationCluster_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import workstations_v1
+
+
+async def sample_update_workstation_cluster():
+    # Create a client
+    client = workstations_v1.WorkstationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = workstations_v1.UpdateWorkstationClusterRequest(
+    )
+
+    # Make the request
+    operation = await client.update_workstation_cluster(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END workstations_v1_generated_Workstations_UpdateWorkstationCluster_async]
diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_cluster_sync.py
new file mode 100644
index 000000000000..e02935571427
--- /dev/null
+++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_cluster_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateWorkstationCluster
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-workstations
+
+
+# [START workstations_v1_generated_Workstations_UpdateWorkstationCluster_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_update_workstation_cluster(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationClusterRequest( + ) + + # Make the request + operation = client.update_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_UpdateWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_config_async.py new file mode 100644 index 000000000000..20c379111bc0 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_config_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_UpdateWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +async def sample_update_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationConfigRequest( + ) + + # Make the request + operation = client.update_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_UpdateWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_config_sync.py new file mode 100644 index 000000000000..b07fb618ea92 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_UpdateWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_update_workstation_config(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationConfigRequest( + ) + + # Make the request + operation = client.update_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_UpdateWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_sync.py new file mode 100644 index 000000000000..13ac0ce7edfd --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1_generated_workstations_update_workstation_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1_generated_Workstations_UpdateWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1 + + +def sample_update_workstation(): + # Create a client + client = workstations_v1.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1.UpdateWorkstationRequest( + ) + + # Make the request + operation = client.update_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1_generated_Workstations_UpdateWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_async.py new file mode 100644 index 000000000000..cf239bcf20b7 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_CreateWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
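+# - Besides the required parent and ID, the resource body itself can be
+#   supplied; a hedged sketch (the "workstation" field name follows the
+#   usual AIP-133 create pattern and is an assumption here):
+#
+#       request = workstations_v1beta.CreateWorkstationRequest(
+#           parent="parent_value",
+#           workstation_id="workstation_id_value",
+#           workstation=workstations_v1beta.Workstation(
+#               display_name="My workstation",  # assumed field name
+#           ),
+#       )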
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_create_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationRequest( + parent="parent_value", + workstation_id="workstation_id_value", + ) + + # Make the request + operation = client.create_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_CreateWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_cluster_async.py new file mode 100644 index 000000000000..c262a83607da --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_cluster_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_CreateWorkstationCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
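+# - On the async client both the RPC call and the returned operation
+#   future are awaitable; a hedged two-step sketch that awaits each
+#   explicitly (google.api_core async operation futures expose result()
+#   as a coroutine):
+#
+#       operation = await client.create_workstation_cluster(request=request)
+#       response = await operation.result()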
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_CreateWorkstationCluster_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_cluster_sync.py new file mode 100644 index 000000000000..beecbba09d6d --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_cluster_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_CreateWorkstationCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
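+# - Long-running operation polling can be bounded; the google.api_core
+#   operation future's result() accepts a timeout in seconds, for example:
+#
+#       response = operation.result(timeout=300)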
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_create_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationClusterRequest( + parent="parent_value", + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Make the request + operation = client.create_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_CreateWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_config_async.py new file mode 100644 index 000000000000..ef57f4925d1a --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_CreateWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_create_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationConfigRequest( + parent="parent_value", + workstation_config_id="workstation_config_id_value", + ) + + # Make the request + operation = client.create_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_CreateWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_config_sync.py new file mode 100644 index 000000000000..c82579aa0862 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_config_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_CreateWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_create_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationConfigRequest( + parent="parent_value", + workstation_config_id="workstation_config_id_value", + ) + + # Make the request + operation = client.create_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_CreateWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_sync.py new file mode 100644 index 000000000000..50ec83e5c70f --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_create_workstation_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_CreateWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_create_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.CreateWorkstationRequest( + parent="parent_value", + workstation_id="workstation_id_value", + ) + + # Make the request + operation = client.create_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_CreateWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_async.py new file mode 100644 index 000000000000..7c60beabe3f7 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_DeleteWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
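+# - The "name" field expects a full resource name; generated clients ship
+#   path helpers for this (the argument order here is assumed from the
+#   API's resource hierarchy):
+#
+#       name = workstations_v1beta.WorkstationsAsyncClient.workstation_path(
+#           "PROJECT", "LOCATION", "CLUSTER", "CONFIG", "WORKSTATION",
+#       )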
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_delete_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_DeleteWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_cluster_async.py new file mode 100644 index 000000000000..b18a88187b2a --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_DeleteWorkstationCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_DeleteWorkstationCluster_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_cluster_sync.py new file mode 100644 index 000000000000..4f8a1cfdc876 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_DeleteWorkstationCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_delete_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_DeleteWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_config_async.py new file mode 100644 index 000000000000..3670467f269c --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_config_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_DeleteWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_delete_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_DeleteWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_config_sync.py new file mode 100644 index 000000000000..f0ae52ed6de6 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_config_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_DeleteWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_delete_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_DeleteWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_sync.py new file mode 100644 index 000000000000..6c07b6c96521 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_delete_workstation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_DeleteWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_delete_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.DeleteWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_DeleteWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_generate_access_token_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_generate_access_token_async.py new file mode 100644 index 000000000000..95dadbcd3f77 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_generate_access_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAccessToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GenerateAccessToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
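+# - The response carries the short-lived token and its expiry; a hedged
+#   usage sketch (response field names assumed):
+#
+#       response = await client.generate_access_token(request=request)
+#       print(response.access_token, response.expire_time)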
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_generate_access_token(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = await client.generate_access_token(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GenerateAccessToken_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_generate_access_token_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_generate_access_token_sync.py new file mode 100644 index 000000000000..3a2c840c189f --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_generate_access_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAccessToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GenerateAccessToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_generate_access_token(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GenerateAccessTokenRequest( + workstation="workstation_value", + ) + + # Make the request + response = client.generate_access_token(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GenerateAccessToken_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_async.py new file mode 100644 index 000000000000..875d558124b1 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GetWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
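+# - This sample defines a coroutine; to execute it directly, drive it with
+#   an event loop, for example:
+#
+#       import asyncio
+#
+#       asyncio.run(sample_get_workstation())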
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_get_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GetWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_cluster_async.py new file mode 100644 index 000000000000..71f03329aec3 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_cluster_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GetWorkstationCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GetWorkstationCluster_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_cluster_sync.py new file mode 100644 index 000000000000..df6d097cda34 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GetWorkstationCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
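+# - Failed calls raise subclasses of
+#   google.api_core.exceptions.GoogleAPICallError; a hedged sketch of
+#   handling a missing resource:
+#
+#       from google.api_core import exceptions
+#
+#       try:
+#           response = client.get_workstation_cluster(request=request)
+#       except exceptions.NotFound:
+#           print("workstation cluster not found")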
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_get_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_cluster(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GetWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_config_async.py new file mode 100644 index 000000000000..65b95a398034 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GetWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_get_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workstation_config(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GetWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_config_sync.py new file mode 100644 index 000000000000..af3af58fe4e1 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GetWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_get_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation_config(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GetWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_sync.py new file mode 100644 index 000000000000..a4f13653f811 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_get_workstation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_GetWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_get_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.GetWorkstationRequest( + name="name_value", + ) + + # Make the request + response = client.get_workstation(request=request) + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_GetWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstation_configs_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstation_configs_async.py new file mode 100644 index 000000000000..81c522fa1cc4 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstation_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsableWorkstationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListUsableWorkstationConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
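+# - It may require replacing the "parent_value" placeholder with the cluster
+#   to list from, typically of the form
+#   projects/{project}/locations/{location}/workstationClusters/{cluster}.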
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_list_usable_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListUsableWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstation_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListUsableWorkstationConfigs_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstation_configs_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstation_configs_sync.py new file mode 100644 index 000000000000..4bef654d941a --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstation_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsableWorkstationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListUsableWorkstationConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_list_usable_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListUsableWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListUsableWorkstationConfigs_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstations_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstations_async.py new file mode 100644 index 000000000000..eaed2dd3f1f9 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsableWorkstations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListUsableWorkstations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
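+# - It may require awaiting the paged call, depending on your client library
+#   version, i.e. page_result = await client.list_usable_workstations(request=request),
+#   before iterating with `async for`.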
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_list_usable_workstations(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListUsableWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstations(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListUsableWorkstations_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstations_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstations_sync.py new file mode 100644 index 000000000000..c47b64e60a37 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_usable_workstations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsableWorkstations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListUsableWorkstations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_list_usable_workstations(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListUsableWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_usable_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListUsableWorkstations_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_clusters_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_clusters_async.py new file mode 100644 index 000000000000..3be23772c211 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_clusters_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstationClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListWorkstationClusters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
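+# - It may require replacing the "parent_value" placeholder with the location
+#   to list clusters in, typically of the form
+#   projects/{project}/locations/{location}.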
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_list_workstation_clusters(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListWorkstationClusters_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_clusters_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_clusters_sync.py new file mode 100644 index 000000000000..621654ef8b34 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_clusters_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstationClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListWorkstationClusters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_list_workstation_clusters(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListWorkstationClusters_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_configs_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_configs_async.py new file mode 100644 index 000000000000..5ea89dd65335 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListWorkstationConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_list_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListWorkstationConfigs_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_configs_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_configs_sync.py new file mode 100644 index 000000000000..055f78d46b16 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstation_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListWorkstationConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_list_workstation_configs(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstation_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListWorkstationConfigs_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstations_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstations_async.py new file mode 100644 index 000000000000..12bb975a1794 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListWorkstations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_list_workstations(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstations(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListWorkstations_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstations_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstations_sync.py new file mode 100644 index 000000000000..1386a571d3c9 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_list_workstations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkstations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_ListWorkstations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
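+# - Note that the returned pager fetches further pages lazily as you iterate;
+#   page_result.pages can be iterated instead when whole page responses are
+#   needed rather than individual items.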
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_list_workstations(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.ListWorkstationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workstations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END workstations_v1beta_generated_Workstations_ListWorkstations_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_start_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_start_workstation_async.py new file mode 100644 index 000000000000..37d8dac1d6af --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_start_workstation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_StartWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
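+# - It must be driven from an event loop, for example via
+#   asyncio.run(sample_start_workstation()).
+# - Depending on the library version, the operation result may itself need to
+#   be awaited, i.e. response = await (await operation).result().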
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_start_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.StartWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.start_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_StartWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_start_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_start_workstation_sync.py new file mode 100644 index 000000000000..4d69cefa4fd1 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_start_workstation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_StartWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
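+# - Note that operation.result() blocks until the long-running operation
+#   completes; a timeout in seconds can be supplied, e.g.
+#   operation.result(timeout=300).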
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_start_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.StartWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.start_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_StartWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_stop_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_stop_workstation_async.py new file mode 100644 index 000000000000..d78986c81c85 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_stop_workstation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_StopWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_stop_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.StopWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.stop_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_StopWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_stop_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_stop_workstation_sync.py new file mode 100644 index 000000000000..4f9f1fbae23c --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_stop_workstation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_StopWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_stop_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.StopWorkstationRequest( + name="name_value", + ) + + # Make the request + operation = client.stop_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_StopWorkstation_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_async.py new file mode 100644 index 000000000000..96784861a5b5 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_UpdateWorkstation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
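+# - The empty UpdateWorkstationRequest() below is only a placeholder; a real
+#   call typically sets at least `workstation` and `update_mask`.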
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_update_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationRequest( + ) + + # Make the request + operation = client.update_workstation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_UpdateWorkstation_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_cluster_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_cluster_async.py new file mode 100644 index 000000000000..93c36bf8c65b --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_cluster_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_UpdateWorkstationCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_update_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationClusterRequest( + ) + + # Make the request + operation = client.update_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_UpdateWorkstationCluster_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_cluster_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_cluster_sync.py new file mode 100644 index 000000000000..a9105eec2e00 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_cluster_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstationCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_UpdateWorkstationCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_update_workstation_cluster(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationClusterRequest( + ) + + # Make the request + operation = client.update_workstation_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_UpdateWorkstationCluster_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_config_async.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_config_async.py new file mode 100644 index 000000000000..73b4ddc2aa95 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_config_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_UpdateWorkstationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +async def sample_update_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsAsyncClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationConfigRequest( + ) + + # Make the request + operation = client.update_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_UpdateWorkstationConfig_async] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_config_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_config_sync.py new file mode 100644 index 000000000000..e6446b2302f2 --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_UpdateWorkstationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_update_workstation_config(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationConfigRequest( + ) + + # Make the request + operation = client.update_workstation_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_UpdateWorkstationConfig_sync] diff --git a/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_sync.py b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_sync.py new file mode 100644 index 000000000000..816d7ff9ffbe --- /dev/null +++ b/packages/google-cloud-workstations/samples/generated_samples/workstations_v1beta_generated_workstations_update_workstation_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkstation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-workstations + + +# [START workstations_v1beta_generated_Workstations_UpdateWorkstation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import workstations_v1beta + + +def sample_update_workstation(): + # Create a client + client = workstations_v1beta.WorkstationsClient() + + # Initialize request argument(s) + request = workstations_v1beta.UpdateWorkstationRequest( + ) + + # Make the request + operation = client.update_workstation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END workstations_v1beta_generated_Workstations_UpdateWorkstation_sync] diff --git a/packages/google-cloud-workstations/scripts/decrypt-secrets.sh b/packages/google-cloud-workstations/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-cloud-workstations/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-workstations/scripts/fixup_workstations_v1_keywords.py b/packages/google-cloud-workstations/scripts/fixup_workstations_v1_keywords.py new file mode 100644 index 000000000000..56e247b32782 --- /dev/null +++ b/packages/google-cloud-workstations/scripts/fixup_workstations_v1_keywords.py @@ -0,0 +1,195 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class workstationsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_workstation': ('parent', 'workstation_id', 'workstation', 'validate_only', ), + 'create_workstation_cluster': ('parent', 'workstation_cluster_id', 'workstation_cluster', 'validate_only', ), + 'create_workstation_config': ('parent', 'workstation_config_id', 'workstation_config', 'validate_only', ), + 'delete_workstation': ('name', 'validate_only', 'etag', ), + 'delete_workstation_cluster': ('name', 'validate_only', 'etag', 'force', ), + 'delete_workstation_config': ('name', 'validate_only', 'etag', 'force', ), + 'generate_access_token': ('workstation', 'expire_time', 'ttl', ), + 'get_workstation': ('name', ), + 'get_workstation_cluster': ('name', ), + 'get_workstation_config': ('name', ), + 'list_usable_workstation_configs': ('parent', 'page_size', 'page_token', ), + 'list_usable_workstations': ('parent', 'page_size', 'page_token', ), + 'list_workstation_clusters': ('parent', 'page_size', 'page_token', ), + 'list_workstation_configs': ('parent', 'page_size', 'page_token', ), + 'list_workstations': ('parent', 'page_size', 'page_token', ), + 'start_workstation': ('name', 'validate_only', 'etag', ), + 'stop_workstation': ('name', 'validate_only', 'etag', ), + 'update_workstation': ('workstation', 'update_mask', 'validate_only', 'allow_missing', ), + 'update_workstation_cluster': ('workstation_cluster', 'update_mask', 'validate_only', 'allow_missing', ), + 'update_workstation_config': ('workstation_config', 'update_mask', 'validate_only', 'allow_missing', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
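+                # For example, a hypothetical positional call such as
+                #   client.get_workstation("name_value")
+                # is rewritten into the request-keyword form
+                #   client.get_workstation(request={'name': "name_value"})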
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=workstationsCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the workstations client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool makes a best-effort attempt at converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also produce false
+      positives when an API method shares a name with another method.
+""")
+    parser.add_argument(
+        '-d',
+        '--input-directory',
+        required=True,
+        dest='input_dir',
+        help='the input directory to walk for python files to fix up',
+    )
+    parser.add_argument(
+        '-o',
+        '--output-directory',
+        required=True,
+        dest='output_dir',
+        help='the directory to output files fixed via un-flattening',
+    )
+    args = parser.parse_args()
+    input_dir = pathlib.Path(args.input_dir)
+    output_dir = pathlib.Path(args.output_dir)
+    if not input_dir.is_dir():
+        print(
+            f"input directory '{input_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if not output_dir.is_dir():
+        print(
+            f"output directory '{output_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if os.listdir(output_dir):
+        print(
+            f"output directory '{output_dir}' is not empty",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    fix_files(input_dir, output_dir)
diff --git a/packages/google-cloud-workstations/scripts/fixup_workstations_v1beta_keywords.py b/packages/google-cloud-workstations/scripts/fixup_workstations_v1beta_keywords.py
new file mode 100644
index 000000000000..56e247b32782
--- /dev/null
+++ b/packages/google-cloud-workstations/scripts/fixup_workstations_v1beta_keywords.py
@@ -0,0 +1,195 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class workstationsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_workstation': ('parent', 'workstation_id', 'workstation', 'validate_only', ), + 'create_workstation_cluster': ('parent', 'workstation_cluster_id', 'workstation_cluster', 'validate_only', ), + 'create_workstation_config': ('parent', 'workstation_config_id', 'workstation_config', 'validate_only', ), + 'delete_workstation': ('name', 'validate_only', 'etag', ), + 'delete_workstation_cluster': ('name', 'validate_only', 'etag', 'force', ), + 'delete_workstation_config': ('name', 'validate_only', 'etag', 'force', ), + 'generate_access_token': ('workstation', 'expire_time', 'ttl', ), + 'get_workstation': ('name', ), + 'get_workstation_cluster': ('name', ), + 'get_workstation_config': ('name', ), + 'list_usable_workstation_configs': ('parent', 'page_size', 'page_token', ), + 'list_usable_workstations': ('parent', 'page_size', 'page_token', ), + 'list_workstation_clusters': ('parent', 'page_size', 'page_token', ), + 'list_workstation_configs': ('parent', 'page_size', 'page_token', ), + 'list_workstations': ('parent', 'page_size', 'page_token', ), + 'start_workstation': ('name', 'validate_only', 'etag', ), + 'stop_workstation': ('name', 'validate_only', 'etag', ), + 'update_workstation': ('workstation', 'update_mask', 'validate_only', 'allow_missing', ), + 'update_workstation_cluster': ('workstation_cluster', 'update_mask', 'validate_only', 'allow_missing', ), + 'update_workstation_config': ('workstation_config', 'update_mask', 'validate_only', 'allow_missing', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=workstationsCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the workstations client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool makes a best-effort attempt at converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also produce false
+      positives when an API method shares a name with another method.
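+
+Example invocation (the directory names are illustrative):
+
+    python fixup_workstations_v1beta_keywords.py -d my_project/ -o my_project_fixed/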
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-workstations/setup.py b/packages/google-cloud-workstations/setup.py new file mode 100644 index 000000000000..00934948ac82 --- /dev/null +++ b/packages/google-cloud-workstations/setup.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-workstations" + + +description = "Google Cloud Workstations API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/workstations/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language 
:: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-workstations/testing/.gitignore b/packages/google-cloud-workstations/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-workstations/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-workstations/testing/constraints-3.10.txt b/packages/google-cloud-workstations/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-workstations/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-workstations/testing/constraints-3.11.txt b/packages/google-cloud-workstations/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-workstations/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-workstations/testing/constraints-3.12.txt b/packages/google-cloud-workstations/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-workstations/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-workstations/testing/constraints-3.7.txt b/packages/google-cloud-workstations/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/packages/google-cloud-workstations/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-workstations/testing/constraints-3.8.txt b/packages/google-cloud-workstations/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-workstations/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-workstations/testing/constraints-3.9.txt b/packages/google-cloud-workstations/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-workstations/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-workstations/tests/__init__.py b/packages/google-cloud-workstations/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-workstations/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-workstations/tests/unit/__init__.py b/packages/google-cloud-workstations/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-workstations/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-workstations/tests/unit/gapic/__init__.py b/packages/google-cloud-workstations/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-workstations/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/__init__.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py new file mode 100644 index 000000000000..0dab4ed41041 --- /dev/null +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1/test_workstations.py @@ -0,0 +1,15738 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
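+#
+# None of the tests below contact the real Workstations service: each one
+# builds a client with anonymous credentials, patches the underlying
+# transport stub with mock.patch.object, and then asserts on the request
+# objects, routing headers, and response plumbing.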
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.workstations_v1.services.workstations import ( + WorkstationsAsyncClient, + WorkstationsClient, + pagers, + transports, +) +from google.cloud.workstations_v1.types import workstations + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
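+# For example, a DEFAULT_ENDPOINT of "localhost:7469" would be replaced with
+# "foo.googleapis.com", while "workstations.googleapis.com" is returned
+# unchanged.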
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert WorkstationsClient._get_default_mtls_endpoint(None) is None + assert ( + WorkstationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + WorkstationsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + WorkstationsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + WorkstationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert WorkstationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (WorkstationsClient, "grpc"), + (WorkstationsAsyncClient, "grpc_asyncio"), + (WorkstationsClient, "rest"), + ], +) +def test_workstations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "workstations.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://workstations.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.WorkstationsGrpcTransport, "grpc"), + (transports.WorkstationsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.WorkstationsRestTransport, "rest"), + ], +) +def test_workstations_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (WorkstationsClient, "grpc"), + (WorkstationsAsyncClient, "grpc_asyncio"), + (WorkstationsClient, "rest"), + ], +) +def test_workstations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "workstations.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://workstations.googleapis.com" + ) + + +def test_workstations_client_get_transport_class(): + transport = WorkstationsClient.get_transport_class() + available_transports = [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsRestTransport, + ] + assert transport in available_transports + + transport = WorkstationsClient.get_transport_class("grpc") + assert transport == transports.WorkstationsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc"), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest"), + ], +) +@mock.patch.object( + WorkstationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkstationsClient) +) +@mock.patch.object( + WorkstationsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkstationsAsyncClient), +) +def test_workstations_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(WorkstationsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(WorkstationsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
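+    # ("always" means the mTLS endpoint is used even when no client
+    # certificate is configured.)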
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc", "true"), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc", "false"), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest", "true"), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + WorkstationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkstationsClient) +) +@mock.patch.object( + WorkstationsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkstationsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_workstations_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
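+    # Roughly: the client lands on DEFAULT_MTLS_ENDPOINT only when
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a client certificate is
+    # available; every other combination keeps DEFAULT_ENDPOINT.
+    # (GOOGLE_API_USE_MTLS_ENDPOINT is pinned to "auto" here.)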
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [WorkstationsClient, WorkstationsAsyncClient]) +@mock.patch.object( + WorkstationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkstationsClient) +) +@mock.patch.object( + WorkstationsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkstationsAsyncClient), +) +def test_workstations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc"), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest"), + ], +) +def test_workstations_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + WorkstationsClient, + transports.WorkstationsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest", None), + ], +) +def test_workstations_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_workstations_client_client_options_from_dict(): + with mock.patch( + "google.cloud.workstations_v1.services.workstations.transports.WorkstationsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = WorkstationsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + WorkstationsClient, + transports.WorkstationsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_workstations_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "workstations.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="workstations.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationClusterRequest, + dict, + ], +) +def test_get_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationCluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + network="network_value", + subnetwork="subnetwork_value", + control_plane_ip="control_plane_ip_value", + degraded=True, + ) + response = client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationCluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.network == "network_value" + assert response.subnetwork == "subnetwork_value" + assert response.control_plane_ip == "control_plane_ip_value" + assert response.degraded is True + + +def test_get_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + client.get_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.GetWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationCluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + network="network_value", + subnetwork="subnetwork_value", + control_plane_ip="control_plane_ip_value", + degraded=True, + ) + ) + response = await client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationCluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.network == "network_value" + assert response.subnetwork == "subnetwork_value" + assert response.control_plane_ip == "control_plane_ip_value" + assert response.degraded is True + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_async_from_dict(): + await test_get_workstation_cluster_async(request_type=dict) + + +def test_get_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GetWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + call.return_value = workstations.WorkstationCluster() + client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
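+    # The field header travels as gRPC metadata, e.g.
+    # ("x-goog-request-params", "name=name_value"), which is what the
+    # assertion at the end of this test checks.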
+ request = workstations.GetWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationCluster() + ) + await client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_workstation_cluster_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationCluster() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workstation_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_workstation_cluster_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_cluster( + workstations.GetWorkstationClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationCluster() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationCluster() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_workstation_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_workstation_cluster( + workstations.GetWorkstationClusterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationClustersRequest, + dict, + ], +) +def test_list_workstation_clusters(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstation_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + client.list_workstation_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationClustersRequest() + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_async( + transport: str = "grpc_asyncio", + request_type=workstations.ListWorkstationClustersRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationClustersRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkstationClustersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_async_from_dict(): + await test_list_workstation_clusters_async(request_type=dict) + + +def test_list_workstation_clusters_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + call.return_value = workstations.ListWorkstationClustersResponse() + client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationClustersResponse() + ) + await client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_workstation_clusters_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_workstation_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_workstation_clusters_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_workstation_clusters( + workstations.ListWorkstationClustersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationClustersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationClustersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_workstation_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_workstation_clusters( + workstations.ListWorkstationClustersRequest(), + parent="parent_value", + ) + + +def test_list_workstation_clusters_pager(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[], + next_page_token="def", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_workstation_clusters(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationCluster) for i in results) + + +def test_list_workstation_clusters_pages(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Set the response to a series of pages. 
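+ # Each successive stub invocation returns the next element of the
+ # side_effect tuple below, simulating four pages; the trailing
+ # RuntimeError only fires if the pager keeps fetching past the final
+ # page (the one with an empty next_page_token). Roughly, the fetch
+ # loop being exercised is:
+ #
+ #   while True:
+ #       response = stub(request)            # next side_effect element
+ #       yield response                      # or its items
+ #       if not response.next_page_token:
+ #           break
+ #       request.page_token = response.next_page_token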
+ call.side_effect = ( + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[], + next_page_token="def", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + ), + RuntimeError, + ) + pages = list(client.list_workstation_clusters(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_async_pager(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[], + next_page_token="def", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_workstation_clusters( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, workstations.WorkstationCluster) for i in responses) + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_async_pages(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
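+ # Iterating a pager yields individual WorkstationCluster items, while
+ # iterating its `.pages` (as this test does) yields whole response
+ # protos so each raw next_page_token can be checked; compare:
+ #
+ #   async for cluster in (await client.list_workstation_clusters(request={})):
+ #       ...                                 # items, 6 in total here
+ #   async for page in (await client.list_workstation_clusters(request={})).pages:
+ #       ...                                 # pages, 4 in total here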
+ call.side_effect = ( + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[], + next_page_token="def", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_workstation_clusters(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationClusterRequest, + dict, + ], +) +def test_create_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + client.create_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.CreateWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_async_from_dict(): + await test_create_workstation_cluster_async(request_type=dict) + + +def test_create_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_workstation_cluster_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_workstation_cluster( + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].workstation_cluster_id + mock_val = "workstation_cluster_id_value" + assert arg == mock_val + + +def test_create_workstation_cluster_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_workstation_cluster( + workstations.CreateWorkstationClusterRequest(), + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_workstation_cluster( + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].workstation_cluster_id + mock_val = "workstation_cluster_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_workstation_cluster( + workstations.CreateWorkstationClusterRequest(), + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationClusterRequest, + dict, + ], +) +def test_update_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + client.update_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.UpdateWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_async_from_dict(): + await test_update_workstation_cluster_async(request_type=dict) + + +def test_update_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationClusterRequest() + + request.workstation_cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_cluster.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationClusterRequest() + + request.workstation_cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_cluster.name=name_value", + ) in kw["metadata"] + + +def test_update_workstation_cluster_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workstation_cluster( + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workstation_cluster_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation_cluster( + workstations.UpdateWorkstationClusterRequest(), + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_workstation_cluster( + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_workstation_cluster( + workstations.UpdateWorkstationClusterRequest(), + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationClusterRequest, + dict, + ], +) +def test_delete_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + client.delete_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.DeleteWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_async_from_dict(): + await test_delete_workstation_cluster_async(request_type=dict) + + +def test_delete_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_workstation_cluster_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_workstation_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_workstation_cluster_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workstation_cluster( + workstations.DeleteWorkstationClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_workstation_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_workstation_cluster( + workstations.DeleteWorkstationClusterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationConfigRequest, + dict, + ], +) +def test_get_workstation_config(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
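+ # The fixture below sets every scalar to a non-default value; proto3
+ # scalars that are left unset read back as falsy defaults, so default
+ # values could not distinguish a populated response from an empty one:
+ #
+ #   workstations.WorkstationConfig().reconciling   # -> False
+ #   workstations.WorkstationConfig().etag          # -> ""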
+ call.return_value = workstations.WorkstationConfig( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + degraded=True, + ) + response = client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.degraded is True + + +def test_get_workstation_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + client.get_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_get_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.GetWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationConfig( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + degraded=True, + ) + ) + response = await client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.degraded is True + + +@pytest.mark.asyncio +async def test_get_workstation_config_async_from_dict(): + await test_get_workstation_config_async(request_type=dict) + + +def test_get_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GetWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
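+ # The `etag` asserted on the fixture above exists for optimistic
+ # concurrency: a config read earlier can be written back and the server
+ # can detect that it changed in between. A hedged sketch of that flow
+ # (names illustrative, not from this test):
+ #
+ #   config = client.get_workstation_config(name=config_name)
+ #   config.display_name = "renamed"
+ #   client.update_workstation_config(
+ #       workstation_config=config,
+ #       update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+ #   )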
+ with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + call.return_value = workstations.WorkstationConfig() + client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GetWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationConfig() + ) + await client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_workstation_config_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workstation_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_workstation_config_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_config( + workstations.GetWorkstationConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workstation_config_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_workstation_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workstation_config_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_workstation_config( + workstations.GetWorkstationConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationConfigsRequest, + dict, + ], +) +def test_list_workstation_configs(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstation_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + client.list_workstation_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_workstation_configs_async( + transport: str = "grpc_asyncio", + request_type=workstations.ListWorkstationConfigsRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_workstation_configs_async_from_dict(): + await test_list_workstation_configs_async(request_type=dict) + + +def test_list_workstation_configs_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + call.return_value = workstations.ListWorkstationConfigsResponse() + client.list_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workstation_configs_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationConfigsResponse() + ) + await client.list_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_workstation_configs_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_workstation_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_workstation_configs_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstation_configs( + workstations.ListWorkstationConfigsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_workstation_configs_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_workstation_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_workstation_configs_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_workstation_configs( + workstations.ListWorkstationConfigsRequest(), + parent="parent_value", + ) + + +def test_list_workstation_configs_pager(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_workstation_configs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in results) + + +def test_list_workstation_configs_pages(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_workstation_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_workstation_configs_async_pager(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_workstation_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in responses) + + +@pytest.mark.asyncio +async def test_list_workstation_configs_async_pages(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_workstation_configs(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListUsableWorkstationConfigsRequest, + dict, + ], +) +def test_list_usable_workstation_configs(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListUsableWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_usable_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationConfigsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListUsableWorkstationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstation_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + client.list_usable_workstation_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_async( + transport: str = "grpc_asyncio", + request_type=workstations.ListUsableWorkstationConfigsRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_usable_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableWorkstationConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_async_from_dict(): + await test_list_usable_workstation_configs_async(request_type=dict) + + +def test_list_usable_workstation_configs_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListUsableWorkstationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + call.return_value = workstations.ListUsableWorkstationConfigsResponse() + client.list_usable_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
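+ # ListUsableWorkstationConfigs mirrors ListWorkstationConfigs at the
+ # wire level; the difference is server-side: it returns only configs on
+ # which the caller can create workstations. The routing pair asserted
+ # below is built the same way as before:
+ #
+ #   md = gapic_v1.routing_header.to_grpc_metadata(
+ #       (("parent", "parent_value"),)
+ #   )
+ #   assert md == ("x-goog-request-params", "parent=parent_value")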
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListUsableWorkstationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationConfigsResponse() + ) + await client.list_usable_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_usable_workstation_configs_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListUsableWorkstationConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_usable_workstation_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_usable_workstation_configs_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_usable_workstation_configs( + workstations.ListUsableWorkstationConfigsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListUsableWorkstationConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_usable_workstation_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
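+        # (Of the two `call.return_value` assignments above, only the
+        # second matters: the FakeUnaryUnaryCall wrapper is what makes the
+        # mocked stub awaitable.) The flattened kwarg was coerced into a
+        # request object, roughly:
+        #
+        #     workstations.ListUsableWorkstationConfigsRequest(parent="parent_value")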
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_usable_workstation_configs( + workstations.ListUsableWorkstationConfigsRequest(), + parent="parent_value", + ) + + +def test_list_usable_workstation_configs_pager(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_usable_workstation_configs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in results) + + +def test_list_usable_workstation_configs_pages(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
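+        # Each element of `side_effect` is consumed by one successive stub
+        # call; the trailing RuntimeError is a sentinel that would surface
+        # only if the pager requested a page past the last one, whose empty
+        # next_page_token should stop iteration first.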
+ call.side_effect = ( + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_usable_workstation_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_async_pager(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_usable_workstation_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in responses) + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_async_pages(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
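+        # `new_callable=mock.AsyncMock` makes the patched __call__ return
+        # awaitables, so the async pager can be driven without a real
+        # channel. The same mechanics in isolation:
+        #
+        #     call = mock.AsyncMock(side_effect=[1, 2])
+        #     assert await call() == 1
+        #     assert await call() == 2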
+ call.side_effect = ( + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_usable_workstation_configs(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationConfigRequest, + dict, + ], +) +def test_create_workstation_config(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_workstation_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + client.create_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_create_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.CreateWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
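+        # Create/Update/Delete are long-running RPCs: the stub returns an
+        # operations_pb2.Operation, which the client wraps in an api_core
+        # future (hence the `future.Future` assertion below). Against a
+        # real service the sync surface would block on it, e.g.:
+        #
+        #     operation = client.create_workstation_config(request)
+        #     config = operation.result(timeout=300)  # illustrative timeout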
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workstation_config_async_from_dict(): + await test_create_workstation_config_async(request_type=dict) + + +def test_create_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_workstation_config_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_workstation_config( + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
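+        # The three flattened kwargs map one-to-one onto request fields,
+        # i.e. the call above is equivalent to passing:
+        #
+        #     workstations.CreateWorkstationConfigRequest(
+        #         parent="parent_value",
+        #         workstation_config=workstations.WorkstationConfig(name="name_value"),
+        #         workstation_config_id="workstation_config_id_value",
+        #     )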
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workstation_config + mock_val = workstations.WorkstationConfig(name="name_value") + assert arg == mock_val + arg = args[0].workstation_config_id + mock_val = "workstation_config_id_value" + assert arg == mock_val + + +def test_create_workstation_config_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_workstation_config( + workstations.CreateWorkstationConfigRequest(), + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_workstation_config_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_workstation_config( + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workstation_config + mock_val = workstations.WorkstationConfig(name="name_value") + assert arg == mock_val + arg = args[0].workstation_config_id + mock_val = "workstation_config_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_workstation_config_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_workstation_config( + workstations.CreateWorkstationConfigRequest(), + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationConfigRequest, + dict, + ], +) +def test_update_workstation_config(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_workstation_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + client.update_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_update_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.UpdateWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_workstation_config_async_from_dict(): + await test_update_workstation_config_async(request_type=dict) + + +def test_update_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationConfigRequest() + + request.workstation_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationConfigRequest() + + request.workstation_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_config.name=name_value", + ) in kw["metadata"] + + +def test_update_workstation_config_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workstation_config( + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation_config + mock_val = workstations.WorkstationConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workstation_config_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation_config( + workstations.UpdateWorkstationConfigRequest(), + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workstation_config_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_workstation_config( + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].workstation_config + mock_val = workstations.WorkstationConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_workstation_config_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_workstation_config( + workstations.UpdateWorkstationConfigRequest(), + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationConfigRequest, + dict, + ], +) +def test_delete_workstation_config(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_workstation_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + client.delete_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_delete_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.DeleteWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_workstation_config_async_from_dict(): + await test_delete_workstation_config_async(request_type=dict) + + +def test_delete_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_workstation_config_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_workstation_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_workstation_config_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workstation_config( + workstations.DeleteWorkstationConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_workstation_config_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_workstation_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_workstation_config_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_workstation_config( + workstations.DeleteWorkstationConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationRequest, + dict, + ], +) +def test_get_workstation(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = workstations.Workstation( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + state=workstations.Workstation.State.STATE_STARTING, + host="host_value", + ) + response = client.get_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.Workstation) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.state == workstations.Workstation.State.STATE_STARTING + assert response.host == "host_value" + + +def test_get_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + client.get_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationRequest() + + +@pytest.mark.asyncio +async def test_get_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.GetWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.Workstation( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + state=workstations.Workstation.State.STATE_STARTING, + host="host_value", + ) + ) + response = await client.get_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.Workstation) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.state == workstations.Workstation.State.STATE_STARTING + assert response.host == "host_value" + + +@pytest.mark.asyncio +async def test_get_workstation_async_from_dict(): + await test_get_workstation_async(request_type=dict) + + +def test_get_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = workstations.GetWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + call.return_value = workstations.Workstation() + client.get_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GetWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.Workstation() + ) + await client.get_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_workstation_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.Workstation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workstation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_workstation_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation( + workstations.GetWorkstationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workstation_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.Workstation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.Workstation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
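+        # FakeUnaryUnaryCall is awaitable and resolves to the message it
+        # wraps, so the `await` below hands back the Workstation proto
+        # directly.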
+ response = await client.get_workstation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workstation_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_workstation( + workstations.GetWorkstationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationsRequest, + dict, + ], +) +def test_list_workstations(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + client.list_workstations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationsRequest() + + +@pytest.mark.asyncio +async def test_list_workstations_async( + transport: str = "grpc_asyncio", request_type=workstations.ListWorkstationsRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
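+        # Besides the page token, List responses carry `unreachable`, the
+        # resources the service could not reach; the assertions below check
+        # that it is surfaced verbatim on the pager.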
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_workstations_async_from_dict(): + await test_list_workstations_async(request_type=dict) + + +def test_list_workstations_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + call.return_value = workstations.ListWorkstationsResponse() + client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workstations_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationsResponse() + ) + await client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_workstations_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_workstations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_workstations_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstations( + workstations.ListWorkstationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_workstations_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_workstations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_workstations_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_workstations( + workstations.ListWorkstationsRequest(), + parent="parent_value", + ) + + +def test_list_workstations_pager(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_workstations(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.Workstation) for i in results) + + +def test_list_workstations_pages(transport_name: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
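+    # As in the preceding pager test, the AnonymousCredentials *class* is
+    # passed rather than an instance (no parentheses); that is harmless
+    # here because only the mocked stub is exercised. Note also that an
+    # empty request routes an empty header value:
+    #
+    #     to_grpc_metadata((("parent", ""),)) == ("x-goog-request-params", "parent=")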
+ with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_workstations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_workstations_async_pager(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_workstations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, workstations.Workstation) for i in responses) + + +@pytest.mark.asyncio +async def test_list_workstations_async_pages(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_workstations(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListUsableWorkstationsRequest, + dict, + ], +) +def test_list_usable_workstations(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListUsableWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + client.list_usable_workstations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationsRequest() + + +@pytest.mark.asyncio +async def test_list_usable_workstations_async( + transport: str = "grpc_asyncio", + request_type=workstations.ListUsableWorkstationsRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableWorkstationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_usable_workstations_async_from_dict(): + await test_list_usable_workstations_async(request_type=dict) + + +def test_list_usable_workstations_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListUsableWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + call.return_value = workstations.ListUsableWorkstationsResponse() + client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_usable_workstations_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListUsableWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationsResponse() + ) + await client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_usable_workstations_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListUsableWorkstationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
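+        # Flattened fields are a convenience layer: the caller passes `parent`
+        # directly instead of building a ListUsableWorkstationsRequest, and
+        # the client assembles the request object. The assertions below check
+        # that the keyword argument landed on the request proto.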
+        client.list_usable_workstations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_usable_workstations_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_usable_workstations(
+            workstations.ListUsableWorkstationsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workstations.ListUsableWorkstationsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_usable_workstations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_usable_workstations(
+            workstations.ListUsableWorkstationsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_usable_workstations_pager(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_usable_workstations(request={})
+
+        assert pager._metadata == expected_metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, workstations.Workstation) for i in results)
+
+
+def test_list_usable_workstations_pages(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_usable_workstations(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_async_pager():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_usable_workstations(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, workstations.Workstation) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_async_pages():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_usable_workstations(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workstations.CreateWorkstationRequest,
+        dict,
+    ],
+)
+def test_create_workstation(request_type, transport: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == workstations.CreateWorkstationRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_workstation_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+ client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + client.create_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationRequest() + + +@pytest.mark.asyncio +async def test_create_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.CreateWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workstation_async_from_dict(): + await test_create_workstation_async(request_type=dict) + + +def test_create_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
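+    # The client mirrors URI-bound fields into the x-goog-request-params
+    # metadata entry so the backend can route the call; the test inspects the
+    # keyword arguments recorded by the mock to confirm the header was sent.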
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_workstation_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_workstation(
+            parent="parent_value",
+            workstation=workstations.Workstation(name="name_value"),
+            workstation_id="workstation_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].workstation
+        mock_val = workstations.Workstation(name="name_value")
+        assert arg == mock_val
+        arg = args[0].workstation_id
+        mock_val = "workstation_id_value"
+        assert arg == mock_val
+
+
+def test_create_workstation_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_workstation(
+            workstations.CreateWorkstationRequest(),
+            parent="parent_value",
+            workstation=workstations.Workstation(name="name_value"),
+            workstation_id="workstation_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_workstation_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_workstation(
+            parent="parent_value",
+            workstation=workstations.Workstation(name="name_value"),
+            workstation_id="workstation_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].workstation
+        mock_val = workstations.Workstation(name="name_value")
+        assert arg == mock_val
+        arg = args[0].workstation_id
+        mock_val = "workstation_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_workstation_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_workstation( + workstations.CreateWorkstationRequest(), + parent="parent_value", + workstation=workstations.Workstation(name="name_value"), + workstation_id="workstation_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationRequest, + dict, + ], +) +def test_update_workstation(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + client.update_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationRequest() + + +@pytest.mark.asyncio +async def test_update_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.UpdateWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_workstation_async_from_dict(): + await test_update_workstation_async(request_type=dict) + + +def test_update_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
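+    # For update RPCs the routing key is the nested resource name, so the
+    # expected header below is "workstation.name=name_value" rather than a
+    # top-level field such as "name" or "parent".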
+ request = workstations.UpdateWorkstationRequest() + + request.workstation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationRequest() + + request.workstation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation.name=name_value", + ) in kw["metadata"] + + +def test_update_workstation_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workstation( + workstation=workstations.Workstation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation + mock_val = workstations.Workstation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workstation_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation( + workstations.UpdateWorkstationRequest(), + workstation=workstations.Workstation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workstation_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.update_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_workstation(
+            workstation=workstations.Workstation(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].workstation
+        mock_val = workstations.Workstation(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_workstation_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_workstation(
+            workstations.UpdateWorkstationRequest(),
+            workstation=workstations.Workstation(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workstations.DeleteWorkstationRequest,
+        dict,
+    ],
+)
+def test_delete_workstation(request_type, transport: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.delete_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == workstations.DeleteWorkstationRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_workstation_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + client.delete_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationRequest() + + +@pytest.mark.asyncio +async def test_delete_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.DeleteWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_workstation_async_from_dict(): + await test_delete_workstation_async(request_type=dict) + + +def test_delete_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_workstation_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_workstation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_workstation_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_workstation(
+            workstations.DeleteWorkstationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_workstation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_workstation(
+            workstations.DeleteWorkstationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workstations.StartWorkstationRequest,
+        dict,
+    ],
+)
+def test_start_workstation(request_type, transport: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
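+        # Start, stop, create, update, and delete are long-running operations:
+        # the stub returns an Operation proto and the client wraps it, which
+        # is why the test asserts that the response is a future.Future.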
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.start_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StartWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_workstation), "__call__" + ) as call: + client.start_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StartWorkstationRequest() + + +@pytest.mark.asyncio +async def test_start_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.StartWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.start_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StartWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_workstation_async_from_dict(): + await test_start_workstation_async(request_type=dict) + + +def test_start_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.StartWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.start_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = workstations.StartWorkstationRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_workstation), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.start_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_start_workstation_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.start_workstation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_start_workstation_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.start_workstation(
+            workstations.StartWorkstationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_start_workstation_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.start_workstation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_start_workstation_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.start_workstation( + workstations.StartWorkstationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.StopWorkstationRequest, + dict, + ], +) +def test_stop_workstation(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.stop_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StopWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call: + client.stop_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StopWorkstationRequest() + + +@pytest.mark.asyncio +async def test_stop_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.StopWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.stop_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StopWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_stop_workstation_async_from_dict(): + await test_stop_workstation_async(request_type=dict) + + +def test_stop_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.StopWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.stop_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_stop_workstation_field_headers_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = workstations.StopWorkstationRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.stop_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_stop_workstation_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.stop_workstation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_stop_workstation_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.stop_workstation(
+            workstations.StopWorkstationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_stop_workstation_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.stop_workstation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_stop_workstation_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.stop_workstation( + workstations.StopWorkstationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GenerateAccessTokenRequest, + dict, + ], +) +def test_generate_access_token(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + response = client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GenerateAccessTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.GenerateAccessTokenResponse) + assert response.access_token == "access_token_value" + + +def test_generate_access_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + client.generate_access_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GenerateAccessTokenRequest() + + +@pytest.mark.asyncio +async def test_generate_access_token_async( + transport: str = "grpc_asyncio", + request_type=workstations.GenerateAccessTokenRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + ) + response = await client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GenerateAccessTokenRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, workstations.GenerateAccessTokenResponse) + assert response.access_token == "access_token_value" + + +@pytest.mark.asyncio +async def test_generate_access_token_async_from_dict(): + await test_generate_access_token_async(request_type=dict) + + +def test_generate_access_token_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GenerateAccessTokenRequest() + + request.workstation = "workstation_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + call.return_value = workstations.GenerateAccessTokenResponse() + client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation=workstation_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_access_token_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GenerateAccessTokenRequest() + + request.workstation = "workstation_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.GenerateAccessTokenResponse() + ) + await client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation=workstation_value", + ) in kw["metadata"] + + +def test_generate_access_token_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.GenerateAccessTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_access_token( + workstation="workstation_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation + mock_val = "workstation_value" + assert arg == mock_val + + +def test_generate_access_token_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.generate_access_token(
+            workstations.GenerateAccessTokenRequest(),
+            workstation="workstation_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_generate_access_token_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.generate_access_token), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workstations.GenerateAccessTokenResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.generate_access_token(
+            workstation="workstation_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].workstation
+        mock_val = "workstation_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_generate_access_token_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.generate_access_token(
+            workstations.GenerateAccessTokenRequest(),
+            workstation="workstation_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workstations.GetWorkstationClusterRequest,
+        dict,
+    ],
+)
+def test_get_workstation_cluster_rest(request_type):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/workstationClusters/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = workstations.WorkstationCluster(
+            name="name_value",
+            display_name="display_name_value",
+            uid="uid_value",
+            reconciling=True,
+            etag="etag_value",
+            network="network_value",
+            subnetwork="subnetwork_value",
+            control_plane_ip="control_plane_ip_value",
+            degraded=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = workstations.WorkstationCluster.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_workstation_cluster(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, workstations.WorkstationCluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.network == "network_value" + assert response.subnetwork == "subnetwork_value" + assert response.control_plane_ip == "control_plane_ip_value" + assert response.degraded is True + + +def test_get_workstation_cluster_rest_required_fields( + request_type=workstations.GetWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationCluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.WorkstationCluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workstation_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_get_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_get_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GetWorkstationClusterRequest.pb( + workstations.GetWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.WorkstationCluster.to_json( + workstations.WorkstationCluster() + ) + + request = workstations.GetWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.WorkstationCluster() + + client.get_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.GetWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workstation_cluster(request) + + +def test_get_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationCluster() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.WorkstationCluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_cluster( + workstations.GetWorkstationClusterRequest(), + name="name_value", + ) + + +def test_get_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationClustersRequest, + dict, + ], +) +def test_list_workstation_clusters_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = workstations.ListWorkstationClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workstation_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstation_clusters_rest_required_fields( + request_type=workstations.ListWorkstationClustersRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListWorkstationClustersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workstation_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workstation_clusters_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workstation_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workstation_clusters_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_workstation_clusters" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_workstation_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListWorkstationClustersRequest.pb( + workstations.ListWorkstationClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + workstations.ListWorkstationClustersResponse.to_json( + workstations.ListWorkstationClustersResponse() + ) + ) + + request = workstations.ListWorkstationClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListWorkstationClustersResponse() + + client.list_workstation_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workstation_clusters_rest_bad_request( + transport: str = "rest", request_type=workstations.ListWorkstationClustersRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workstation_clusters(request) + + +def test_list_workstation_clusters_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workstation_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/workstationClusters" + % client.transport._host, + args[1], + ) + + +def test_list_workstation_clusters_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstation_clusters( + workstations.ListWorkstationClustersRequest(), + parent="parent_value", + ) + + +def test_list_workstation_clusters_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[], + next_page_token="def", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListWorkstationClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_workstation_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationCluster) for i in results) + + pages = list(client.list_workstation_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationClusterRequest, + dict, + ], +) +def test_create_workstation_cluster_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["workstation_cluster"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workstation_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_workstation_cluster_rest_required_fields( + request_type=workstations.CreateWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["workstation_cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "workstationClusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workstationClusterId" in jsonified_request + assert ( + jsonified_request["workstationClusterId"] + == request_init["workstation_cluster_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["workstationClusterId"] = "workstation_cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "validate_only", + "workstation_cluster_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "workstationClusterId" in jsonified_request + assert jsonified_request["workstationClusterId"] == "workstation_cluster_id_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_workstation_cluster(request) + + expected_params = [ + ( + "workstationClusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "validateOnly", + "workstationClusterId", + ) + ) + & set( + ( + "parent", + "workstationClusterId", + "workstationCluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_create_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_create_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.CreateWorkstationClusterRequest.pb( + workstations.CreateWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.CreateWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.CreateWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["workstation_cluster"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": 
"etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workstation_cluster(request) + + +def test_create_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/workstationClusters" + % client.transport._host, + args[1], + ) + + +def test_create_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_workstation_cluster( + workstations.CreateWorkstationClusterRequest(), + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + +def test_create_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationClusterRequest, + dict, + ], +) +def test_update_workstation_cluster_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_cluster": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + } + request_init["workstation_cluster"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workstation_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_workstation_cluster_rest_required_fields( + request_type=workstations.UpdateWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_workstation_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "workstationCluster", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_update_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_update_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.UpdateWorkstationClusterRequest.pb( + workstations.UpdateWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.UpdateWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.UpdateWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_cluster": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + } + request_init["workstation_cluster"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + 
"update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_workstation_cluster(request) + + +def test_update_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "workstation_cluster": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{workstation_cluster.name=projects/*/locations/*/workstationClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_update_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_workstation_cluster( + workstations.UpdateWorkstationClusterRequest(), + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationClusterRequest, + dict, + ], +) +def test_delete_workstation_cluster_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workstation_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_workstation_cluster_rest_required_fields( + request_type=workstations.DeleteWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "force", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workstation_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_delete_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_delete_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.DeleteWorkstationClusterRequest.pb( + workstations.DeleteWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.DeleteWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.DeleteWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workstation_cluster(request) + + +def test_delete_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workstation_cluster( + workstations.DeleteWorkstationClusterRequest(), + name="name_value", + ) + + +def test_delete_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationConfigRequest, + dict, + ], +) +def test_get_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = workstations.WorkstationConfig( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + degraded=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.WorkstationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_workstation_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.degraded is True + + +def test_get_workstation_config_rest_required_fields( + request_type=workstations.GetWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.WorkstationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workstation_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_get_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_get_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GetWorkstationConfigRequest.pb( + workstations.GetWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.WorkstationConfig.to_json( + workstations.WorkstationConfig() + ) + + request = workstations.GetWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.WorkstationConfig() + + client.get_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.GetWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workstation_config(request) + + +def test_get_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.WorkstationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_config( + workstations.GetWorkstationConfigRequest(), + name="name_value", + ) + + +def test_get_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationConfigsRequest, + dict, + ], +) +def test_list_workstation_configs_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
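+ # Only scalar response fields are populated here; the repeated
+ # workstation_configs field is exercised separately by the pager test
+ # further down.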
+ return_value = workstations.ListWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workstation_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstation_configs_rest_required_fields( + request_type=workstations.ListWorkstationConfigsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workstation_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workstation_configs_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workstation_configs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workstation_configs_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_workstation_configs" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_workstation_configs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListWorkstationConfigsRequest.pb( + workstations.ListWorkstationConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.ListWorkstationConfigsResponse.to_json( + workstations.ListWorkstationConfigsResponse() + ) + + request = workstations.ListWorkstationConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListWorkstationConfigsResponse() + + client.list_workstation_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workstation_configs_rest_bad_request( + transport: str = "rest", request_type=workstations.ListWorkstationConfigsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workstation_configs(request) + + +def test_list_workstation_configs_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workstation_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_workstation_configs_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstation_configs( + workstations.ListWorkstationConfigsRequest(), + parent="parent_value", + ) + + +def test_list_workstation_configs_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
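+ # The pager collapses the page-token chain set up below into one lazy
+ # sequence: each page fetch consumes one mocked response, stopping once
+ # next_page_token comes back empty, e.g.
+ #   for config in client.list_workstation_configs(request=sample_request):
+ #       ...  # yields 6 WorkstationConfig items across 4 pages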
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListWorkstationConfigsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + pager = client.list_workstation_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in results) + + pages = list(client.list_workstation_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListUsableWorkstationConfigsRequest, + dict, + ], +) +def test_list_usable_workstation_configs_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_usable_workstation_configs(request) + + # Establish that the response is the type that we expect. 
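+ # Even a single mocked page comes back wrapped in a pager; the scalar
+ # assertions below work because the pager proxies attribute access
+ # through to the underlying ListUsableWorkstationConfigsResponse.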
+ assert isinstance(response, pagers.ListUsableWorkstationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstation_configs_rest_required_fields( + request_type=workstations.ListUsableWorkstationConfigsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstation_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstation_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListUsableWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_usable_workstation_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_usable_workstation_configs_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_usable_workstation_configs._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_workstation_configs_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_usable_workstation_configs" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_usable_workstation_configs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListUsableWorkstationConfigsRequest.pb( + workstations.ListUsableWorkstationConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + workstations.ListUsableWorkstationConfigsResponse.to_json( + workstations.ListUsableWorkstationConfigsResponse() + ) + ) + + request = workstations.ListUsableWorkstationConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListUsableWorkstationConfigsResponse() + + client.list_usable_workstation_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_usable_workstation_configs_rest_bad_request( + transport: str = "rest", + request_type=workstations.ListUsableWorkstationConfigsRequest, +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_usable_workstation_configs(request) + + +def test_list_usable_workstation_configs_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_usable_workstation_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs:listUsable" + % client.transport._host, + args[1], + ) + + +def test_list_usable_workstation_configs_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_usable_workstation_configs( + workstations.ListUsableWorkstationConfigsRequest(), + parent="parent_value", + ) + + +def test_list_usable_workstation_configs_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
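+ # Besides item iteration, the pager exposes page-level iteration: the
+ # `.pages` loop at the end of this test checks each raw_page's
+ # next_page_token against the ["abc", "def", "ghi", ""] chain below.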
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListUsableWorkstationConfigsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + pager = client.list_usable_workstation_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in results) + + pages = list( + client.list_usable_workstation_configs(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationConfigRequest, + dict, + ], +) +def test_create_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request_init["workstation_config"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "enable_nested_virtualization": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + } + }, + "persistent_directories": [ + { + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": "source_snapshot_value", + "reclaim_policy": 1, + }, + "mount_path": "mount_path_value", + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + "encryption_key": { 
+ "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workstation_config(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_workstation_config_rest_required_fields( + request_type=workstations.CreateWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["workstation_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "workstationConfigId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workstationConfigId" in jsonified_request + assert ( + jsonified_request["workstationConfigId"] + == request_init["workstation_config_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["workstationConfigId"] = "workstation_config_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "validate_only", + "workstation_config_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "workstationConfigId" in jsonified_request + assert jsonified_request["workstationConfigId"] == "workstation_config_id_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_workstation_config(request) + + expected_params = [ + ( + "workstationConfigId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "validateOnly", + "workstationConfigId", + ) + ) + & set( + ( + "parent", + "workstationConfigId", + "workstationConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_create_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_create_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.CreateWorkstationConfigRequest.pb( + workstations.CreateWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.CreateWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.CreateWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request_init["workstation_config"] = { + "name": "name_value", + "display_name": 
"display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "enable_nested_virtualization": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + } + }, + "persistent_directories": [ + { + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": "source_snapshot_value", + "reclaim_policy": 1, + }, + "mount_path": "mount_path_value", + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + "encryption_key": { + "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workstation_config(request) + + +def test_create_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs" + % client.transport._host, + args[1], + ) + + +def test_create_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_workstation_config( + workstations.CreateWorkstationConfigRequest(), + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + + +def test_create_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationConfigRequest, + dict, + ], +) +def test_update_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_config": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + } + request_init["workstation_config"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "enable_nested_virtualization": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + } + }, + "persistent_directories": [ + { + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": "source_snapshot_value", + "reclaim_policy": 1, + }, + "mount_path": "mount_path_value", + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + "encryption_key": { + "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workstation_config(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_workstation_config_rest_required_fields( + request_type=workstations.UpdateWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
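+ # Update is the one mutation here that transcodes to PATCH; the stub
+ # below mirrors that, with the workstation_config message riding along
+ # as transcode_result["body"].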
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_workstation_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "workstationConfig", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_update_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_update_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.UpdateWorkstationConfigRequest.pb( + workstations.UpdateWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.UpdateWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.UpdateWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_config": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + } + request_init["workstation_config"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": 
{"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "enable_nested_virtualization": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + } + }, + "persistent_directories": [ + { + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": "source_snapshot_value", + "reclaim_policy": 1, + }, + "mount_path": "mount_path_value", + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + "encryption_key": { + "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_workstation_config(request) + + +def test_update_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "workstation_config": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{workstation_config.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation_config( + workstations.UpdateWorkstationConfigRequest(), + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationConfigRequest, + dict, + ], +) +def test_delete_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workstation_config(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_workstation_config_rest_required_fields( + request_type=workstations.DeleteWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
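+ # The set difference below asserts that whatever remains unset is one of
+ # the optional query parameters (etag, force, validate_only); any
+ # leftover path or body field here would signal a transcoding mix-up.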
+ assert not set(unset_fields) - set( + ( + "etag", + "force", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workstation_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_delete_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_delete_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.DeleteWorkstationConfigRequest.pb( + workstations.DeleteWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.DeleteWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + 
post.return_value = operations_pb2.Operation() + + client.delete_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.DeleteWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workstation_config(request) + + +def test_delete_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
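+    # (Flattened keyword fields are shorthand that builds the request object
+    # for the caller; a method accepts one style or the other, never both.
+    # Assuming the same client, either of these calls would be valid:
+    #   client.delete_workstation_config(name="name_value")
+    #   client.delete_workstation_config(
+    #       workstations.DeleteWorkstationConfigRequest(name="name_value")
+    #   )
+    # Mixing them raises ValueError, as exercised below.)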
+ with pytest.raises(ValueError): + client.delete_workstation_config( + workstations.DeleteWorkstationConfigRequest(), + name="name_value", + ) + + +def test_delete_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationRequest, + dict, + ], +) +def test_get_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.Workstation( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + state=workstations.Workstation.State.STATE_STARTING, + host="host_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.Workstation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_workstation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.Workstation) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.state == workstations.Workstation.State.STATE_STARTING + assert response.host == "host_value" + + +def test_get_workstation_rest_required_fields( + request_type=workstations.GetWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = workstations.Workstation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.Workstation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workstation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_get_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_get_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GetWorkstationRequest.pb( + workstations.GetWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.Workstation.to_json( + workstations.Workstation() + ) + + request = workstations.GetWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.Workstation() + + client.get_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.GetWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workstation(request) + + +def test_get_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.Workstation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.Workstation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation( + workstations.GetWorkstationRequest(), + name="name_value", + ) + + +def test_get_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationsRequest, + dict, + ], +) +def test_list_workstations_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = workstations.ListWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workstations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstations_rest_required_fields( + request_type=workstations.ListWorkstationsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
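+            # (transcode() normally derives these values from the method's
+            # http_options annotations; the stub below only mimics its output
+            # shape, a dict with "uri", "method" and "query_params", plus a
+            # "body" entry for methods that carry one, so the transport can
+            # proceed without real path expansion.)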
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workstations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workstations_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workstations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workstations_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_workstations" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_workstations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListWorkstationsRequest.pb( + workstations.ListWorkstationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.ListWorkstationsResponse.to_json( + workstations.ListWorkstationsResponse() + ) + + request = workstations.ListWorkstationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListWorkstationsResponse() + + client.list_workstations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workstations_rest_bad_request( + transport: str = "rest", request_type=workstations.ListWorkstationsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workstations(request) + + +def test_list_workstations_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workstations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations" + % client.transport._host, + args[1], + ) + + +def test_list_workstations_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstations( + workstations.ListWorkstationsRequest(), + parent="parent_value", + ) + + +def test_list_workstations_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
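+        # (Page arithmetic for the fixture built below: 3 + 0 + 1 + 2
+        # workstations across four pages, and the pager follows
+        # next_page_token until a page omits it, so iterating yields six
+        # items. The pages are doubled because the test consumes them twice:
+        # once via list(pager) and once via .pages.)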
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListWorkstationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + pager = client.list_workstations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.Workstation) for i in results) + + pages = list(client.list_workstations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListUsableWorkstationsRequest, + dict, + ], +) +def test_list_usable_workstations_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_usable_workstations(request) + + # Establish that the response is the type that we expect. 
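+    # (The pager wraps the raw first response; iterating it, or touching
+    # .pages, would lazily issue further requests whenever next_page_token
+    # is set.)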
+ assert isinstance(response, pagers.ListUsableWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstations_rest_required_fields( + request_type=workstations.ListUsableWorkstationsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
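+            # (Spelling note for this file: proto-level assertions use
+            # snake_case, e.g. "page_size", while jsonified assertions use
+            # camelCase, e.g. "pageSize"; both name the same fields.)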
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListUsableWorkstationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_usable_workstations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_usable_workstations_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_usable_workstations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_workstations_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_usable_workstations" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_usable_workstations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListUsableWorkstationsRequest.pb( + workstations.ListUsableWorkstationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.ListUsableWorkstationsResponse.to_json( + workstations.ListUsableWorkstationsResponse() + ) + + request = workstations.ListUsableWorkstationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListUsableWorkstationsResponse() + + client.list_usable_workstations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_usable_workstations_rest_bad_request( + transport: str = "rest", request_type=workstations.ListUsableWorkstationsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_usable_workstations(request) + + +def test_list_usable_workstations_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_usable_workstations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations:listUsable" + % client.transport._host, + args[1], + ) + + +def test_list_usable_workstations_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_usable_workstations( + workstations.ListUsableWorkstationsRequest(), + parent="parent_value", + ) + + +def test_list_usable_workstations_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListUsableWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListUsableWorkstationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + pager = client.list_usable_workstations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.Workstation) for i in results) + + pages = list(client.list_usable_workstations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationRequest, + dict, + ], +) +def test_create_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request_init["workstation"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "state": 1, + "host": "host_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workstation(request) + + # Establish that the response is the type that we expect. 
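+    # (create_workstation is long-running: the transport returns an
+    # operation wrapper, and in real use response.result() would poll until
+    # completion. With the HTTP session mocked, the test only inspects the
+    # raw operation name.)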
+ assert response.operation.name == "operations/spam" + + +def test_create_workstation_rest_required_fields( + request_type=workstations.CreateWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["workstation_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "workstationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workstationId" in jsonified_request + assert jsonified_request["workstationId"] == request_init["workstation_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["workstationId"] = "workstation_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "validate_only", + "workstation_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "workstationId" in jsonified_request + assert jsonified_request["workstationId"] == "workstation_id_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
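+            # (Unlike the GET and DELETE methods above, this POST also
+            # carries the resource as an HTTP body, which is why the stubbed
+            # transcode result sets "body" in addition to "query_params".)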
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_workstation(request) + + expected_params = [ + ( + "workstationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_workstation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "validateOnly", + "workstationId", + ) + ) + & set( + ( + "parent", + "workstationId", + "workstation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_create_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_create_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.CreateWorkstationRequest.pb( + workstations.CreateWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.CreateWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.CreateWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request_init["workstation"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "state": 1, + "host": "host_value", + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workstation(request) + + +def test_create_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + workstation=workstations.Workstation(name="name_value"), + workstation_id="workstation_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations" + % client.transport._host, + args[1], + ) + + +def test_create_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_workstation( + workstations.CreateWorkstationRequest(), + parent="parent_value", + workstation=workstations.Workstation(name="name_value"), + workstation_id="workstation_id_value", + ) + + +def test_create_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationRequest, + dict, + ], +) +def test_update_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + } + request_init["workstation"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "state": 1, + "host": "host_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workstation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_workstation_rest_required_fields( + request_type=workstations.UpdateWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
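+    # (update_workstation is a PATCH method: updateMask selects which body
+    # fields to apply, so "workstation" and "updateMask" form the required
+    # pair asserted in the unset-required-fields test further below.)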
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_workstation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "workstation", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_update_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_update_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.UpdateWorkstationRequest.pb( + workstations.UpdateWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.UpdateWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.UpdateWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that 
will satisfy transcoding + request_init = { + "workstation": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + } + request_init["workstation"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "state": 1, + "host": "host_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_workstation(request) + + +def test_update_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "workstation": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + workstation=workstations.Workstation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{workstation.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_workstation( + workstations.UpdateWorkstationRequest(), + workstation=workstations.Workstation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationRequest, + dict, + ], +) +def test_delete_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workstation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_workstation_rest_required_fields( + request_type=workstations.DeleteWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workstation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_delete_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_delete_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.DeleteWorkstationRequest.pb( + workstations.DeleteWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.DeleteWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.DeleteWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
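+    # google.api_core translates HTTP status codes into typed exceptions
+    # (400 -> BadRequest, 404 -> NotFound, and so on), so the mocked 400
+    # response below is expected to surface as core_exceptions.BadRequest.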
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workstation(request) + + +def test_delete_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workstation( + workstations.DeleteWorkstationRequest(), + name="name_value", + ) + + +def test_delete_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.StartWorkstationRequest, + dict, + ], +) +def test_start_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_workstation(request) + + # Establish that the response is the type that we expect. 
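+    # start_workstation is a long-running operation: the REST layer parses the
+    # mocked Operation JSON into an operation future, so the test inspects the
+    # underlying proto via response.operation rather than calling
+    # response.result().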
+ assert response.operation.name == "operations/spam" + + +def test_start_workstation_rest_required_fields( + request_type=workstations.StartWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.start_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_start_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.start_workstation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_start_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_start_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.StartWorkstationRequest.pb( + workstations.StartWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.StartWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.start_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.StartWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_workstation(request) + + +def test_start_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.start_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:start" + % client.transport._host, + args[1], + ) + + +def test_start_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_workstation( + workstations.StartWorkstationRequest(), + name="name_value", + ) + + +def test_start_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.StopWorkstationRequest, + dict, + ], +) +def test_stop_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.stop_workstation(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_stop_workstation_rest_required_fields( + request_type=workstations.StopWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
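+            # (The fixed "v1/sample_method" uri used below has no path
+            # placeholders, so nothing is lifted out of the request into the
+            # path; everything except the body ends up in query_params.)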
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.stop_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stop_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stop_workstation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_stop_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_stop_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.StopWorkstationRequest.pb( + workstations.StopWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.StopWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.stop_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.StopWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_workstation(request) + + +def test_stop_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.stop_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:stop" + % client.transport._host, + args[1], + ) + + +def test_stop_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stop_workstation( + workstations.StopWorkstationRequest(), + name="name_value", + ) + + +def test_stop_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GenerateAccessTokenRequest, + dict, + ], +) +def test_generate_access_token_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
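+        # Unlike the workstation mutations above, this RPC returns a plain
+        # message rather than a long-running Operation, so the fake response is
+        # serialized through the proto-plus .pb() wrapper below.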
+ return_value = workstations.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_access_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.GenerateAccessTokenResponse) + assert response.access_token == "access_token_value" + + +def test_generate_access_token_rest_required_fields( + request_type=workstations.GenerateAccessTokenRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["workstation"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_access_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["workstation"] = "workstation_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_access_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "workstation" in jsonified_request + assert jsonified_request["workstation"] == "workstation_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.GenerateAccessTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_access_token(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_access_token_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_access_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("workstation",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_access_token_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_generate_access_token" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_generate_access_token" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GenerateAccessTokenRequest.pb( + workstations.GenerateAccessTokenRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.GenerateAccessTokenResponse.to_json( + workstations.GenerateAccessTokenResponse() + ) + + request = workstations.GenerateAccessTokenRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.GenerateAccessTokenResponse() + + client.generate_access_token( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_access_token_rest_bad_request( + transport: str = "rest", request_type=workstations.GenerateAccessTokenRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_access_token(request) + + +def test_generate_access_token_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.GenerateAccessTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "workstation": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + workstation="workstation_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_access_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{workstation=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:generateAccessToken" + % client.transport._host, + args[1], + ) + + +def test_generate_access_token_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_access_token( + workstations.GenerateAccessTokenRequest(), + workstation="workstation_value", + ) + + +def test_generate_access_token_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = WorkstationsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.WorkstationsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsGrpcAsyncIOTransport, + transports.WorkstationsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = WorkstationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.WorkstationsGrpcTransport, + ) + + +def test_workstations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.WorkstationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_workstations_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.workstations_v1.services.workstations.transports.WorkstationsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.WorkstationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
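+    # Each RPC on the base transport is, roughly sketched, a stub of the form
+    #     def get_workstation(self): raise NotImplementedError()
+    # which the concrete gRPC and REST transports are expected to override.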
+ methods = ( + "get_workstation_cluster", + "list_workstation_clusters", + "create_workstation_cluster", + "update_workstation_cluster", + "delete_workstation_cluster", + "get_workstation_config", + "list_workstation_configs", + "list_usable_workstation_configs", + "create_workstation_config", + "update_workstation_config", + "delete_workstation_config", + "get_workstation", + "list_workstations", + "list_usable_workstations", + "create_workstation", + "update_workstation", + "delete_workstation", + "start_workstation", + "stop_workstation", + "generate_access_token", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_workstations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.workstations_v1.services.workstations.transports.WorkstationsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkstationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_workstations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.workstations_v1.services.workstations.transports.WorkstationsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkstationsTransport() + adc.assert_called_once() + + +def test_workstations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + WorkstationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsGrpcAsyncIOTransport, + ], +) +def test_workstations_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
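+    # google.auth.default() implements Application Default Credentials: it
+    # consults GOOGLE_APPLICATION_CREDENTIALS, then gcloud user credentials,
+    # then the metadata server. The tests below stub it out entirely.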
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsGrpcAsyncIOTransport, + transports.WorkstationsRestTransport, + ], +) +def test_workstations_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkstationsGrpcTransport, grpc_helpers), + (transports.WorkstationsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_workstations_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "workstations.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="workstations.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.WorkstationsGrpcTransport, transports.WorkstationsGrpcAsyncIOTransport], +) +def test_workstations_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
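+    # client_cert_source_for_mtls is a zero-argument callback returning a
+    # (certificate_chain, private_key) pair of PEM bytes, which is why the
+    # assertion below re-invokes client_cert_source_callback() to compute the
+    # expected values.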
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_workstations_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.WorkstationsRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_workstations_rest_lro_client():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_workstations_host_no_port(transport_name):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workstations.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "workstations.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://workstations.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_workstations_host_with_port(transport_name):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workstations.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "workstations.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://workstations.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_workstations_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = WorkstationsClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = WorkstationsClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_workstation_cluster._session
+    session2 = client2.transport.get_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.list_workstation_clusters._session
+    session2 = client2.transport.list_workstation_clusters._session
+    assert session1 != session2
+    session1 = client1.transport.create_workstation_cluster._session
+    session2 = client2.transport.create_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.update_workstation_cluster._session
+    session2 = client2.transport.update_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.delete_workstation_cluster._session
+    session2 = client2.transport.delete_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.get_workstation_config._session
+    session2 = client2.transport.get_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.list_workstation_configs._session
+    session2 = client2.transport.list_workstation_configs._session
+    assert session1 != session2
+    session1 = client1.transport.list_usable_workstation_configs._session
+    session2 = client2.transport.list_usable_workstation_configs._session
+    assert session1 != session2
+    session1 = client1.transport.create_workstation_config._session
+    session2 = client2.transport.create_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.update_workstation_config._session
+    session2 = client2.transport.update_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.delete_workstation_config._session
+    session2 = client2.transport.delete_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.get_workstation._session
+    session2 = client2.transport.get_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.list_workstations._session
+    session2 = client2.transport.list_workstations._session
+    assert session1 != session2
+    session1 = client1.transport.list_usable_workstations._session
+    session2 = client2.transport.list_usable_workstations._session
+    assert session1 != session2
+    session1 = client1.transport.create_workstation._session
+    session2 = client2.transport.create_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.update_workstation._session
+    session2 = client2.transport.update_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.delete_workstation._session
+    session2 = client2.transport.delete_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.start_workstation._session
+    session2 = client2.transport.start_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.stop_workstation._session
+    session2 = client2.transport.stop_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.generate_access_token._session
+    session2 = client2.transport.generate_access_token._session
+    assert session1 != session2
+
+
+def test_workstations_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkstationsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_workstations_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkstationsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.WorkstationsGrpcTransport, transports.WorkstationsGrpcAsyncIOTransport],
+)
+def test_workstations_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.WorkstationsGrpcTransport, transports.WorkstationsGrpcAsyncIOTransport],
+)
+def test_workstations_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_workstations_grpc_lro_client():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
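+    # (operations_client is built lazily on first property access and then
+    # cached, so an identity check is the right assertion here.)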
+    assert transport.operations_client is transport.operations_client
+
+
+def test_workstations_grpc_lro_async_client():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_workstation_path():
+    project = "squid"
+    location = "clam"
+    workstation_cluster = "whelk"
+    workstation_config = "octopus"
+    workstation = "oyster"
+    expected = "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}/workstations/{workstation}".format(
+        project=project,
+        location=location,
+        workstation_cluster=workstation_cluster,
+        workstation_config=workstation_config,
+        workstation=workstation,
+    )
+    actual = WorkstationsClient.workstation_path(
+        project, location, workstation_cluster, workstation_config, workstation
+    )
+    assert expected == actual
+
+
+def test_parse_workstation_path():
+    expected = {
+        "project": "nudibranch",
+        "location": "cuttlefish",
+        "workstation_cluster": "mussel",
+        "workstation_config": "winkle",
+        "workstation": "nautilus",
+    }
+    path = WorkstationsClient.workstation_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkstationsClient.parse_workstation_path(path)
+    assert expected == actual
+
+
+def test_workstation_cluster_path():
+    project = "scallop"
+    location = "abalone"
+    workstation_cluster = "squid"
+    expected = "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}".format(
+        project=project,
+        location=location,
+        workstation_cluster=workstation_cluster,
+    )
+    actual = WorkstationsClient.workstation_cluster_path(
+        project, location, workstation_cluster
+    )
+    assert expected == actual
+
+
+def test_parse_workstation_cluster_path():
+    expected = {
+        "project": "clam",
+        "location": "whelk",
+        "workstation_cluster": "octopus",
+    }
+    path = WorkstationsClient.workstation_cluster_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkstationsClient.parse_workstation_cluster_path(path)
+    assert expected == actual
+
+
+def test_workstation_config_path():
+    project = "oyster"
+    location = "nudibranch"
+    workstation_cluster = "cuttlefish"
+    workstation_config = "mussel"
+    expected = "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}".format(
+        project=project,
+        location=location,
+        workstation_cluster=workstation_cluster,
+        workstation_config=workstation_config,
+    )
+    actual = WorkstationsClient.workstation_config_path(
+        project, location, workstation_cluster, workstation_config
+    )
+    assert expected == actual
+
+
+def test_parse_workstation_config_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+        "workstation_cluster": "scallop",
+        "workstation_config": "abalone",
+    }
+    path = WorkstationsClient.workstation_config_path(**expected)
+
+    # Check that the path construction is reversible.
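+    # For the values above, the helpers round-trip through
+    # "projects/winkle/locations/nautilus/workstationClusters/scallop/workstationConfigs/abalone";
+    # parse_workstation_config_path simply inverts the .format() template.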
+ actual = WorkstationsClient.parse_workstation_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = WorkstationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = WorkstationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = WorkstationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = WorkstationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = WorkstationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = WorkstationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = WorkstationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = WorkstationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = WorkstationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = WorkstationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = WorkstationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.WorkstationsTransport, "_prep_wrapped_messages" + ) as prep: + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.WorkstationsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = WorkstationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
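+ # Only the type is asserted: the mocked body was an empty Policy serialized
+ # with MessageToJson, so there are no field values left to compare.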
+ assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
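+ # An empty TestIamPermissionsResponse is enough here, since the test only
+ # verifies that the JSON body round-trips into the expected message type.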
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
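+ # Patching the transport session's request method exercises the REST code
+ # path (URL construction, header injection, response parsing) without any
+ # real network I/O.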
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
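+ # A bare 400 response should be mapped by api-core to
+ # core_exceptions.BadRequest, which the test expects to propagate unchanged.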
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
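+ # The client is expected to mirror request.name into the
+ # "x-goog-request-params" metadata entry so the backend can route the call.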
+ request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
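+ # The mocked stub returned None (the RPC has an empty response message), so
+ # the client surfaces None to the caller.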
+ assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
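+ # Metadata travels as a keyword argument on the stub call, so the routing
+ # header is checked in the recorded call's kwargs rather than its positional args.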
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
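+ # FakeUnaryUnaryCall wraps the canned response in an awaitable, letting the
+ # async client await the mocked stub just like a real gRPC call.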
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.ListOperationsResponse()
+ )
+ response = await client.list_operations(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy(
+ version=774,
+ etag=b"etag_blob",
+ )
+ response = client.set_iam_policy(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ policy_pb2.Policy(
+ version=774,
+ etag=b"etag_blob",
+ )
+ )
+ response = await client.set_iam_policy(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ call.return_value = policy_pb2.Policy()
+
+ client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
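+ # Unlike the empty-message cases above, the canned version and etag values
+ # also let the test confirm that individual fields survive the round trip.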
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (WorkstationsClient, transports.WorkstationsGrpcTransport), + (WorkstationsAsyncClient, transports.WorkstationsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/__init__.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py new file mode 100644 index 000000000000..9241ad424991 --- /dev/null +++ b/packages/google-cloud-workstations/tests/unit/gapic/workstations_v1beta/test_workstations.py @@ -0,0 +1,15756 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.workstations_v1beta.services.workstations import ( + WorkstationsAsyncClient, + WorkstationsClient, + pagers, + transports, +) +from google.cloud.workstations_v1beta.types import workstations + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
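+# For example, if DEFAULT_ENDPOINT were a localhost address, the helper would
+# substitute "foo.googleapis.com", giving _get_default_mtls_endpoint a value it
+# can turn into a distinct "foo.mtls.googleapis.com".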
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert WorkstationsClient._get_default_mtls_endpoint(None) is None + assert ( + WorkstationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + WorkstationsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + WorkstationsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + WorkstationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert WorkstationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (WorkstationsClient, "grpc"), + (WorkstationsAsyncClient, "grpc_asyncio"), + (WorkstationsClient, "rest"), + ], +) +def test_workstations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "workstations.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://workstations.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.WorkstationsGrpcTransport, "grpc"), + (transports.WorkstationsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.WorkstationsRestTransport, "rest"), + ], +) +def test_workstations_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (WorkstationsClient, "grpc"), + (WorkstationsAsyncClient, "grpc_asyncio"), + (WorkstationsClient, "rest"), + ], +) +def test_workstations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "workstations.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://workstations.googleapis.com" + ) + + +def test_workstations_client_get_transport_class(): + transport = WorkstationsClient.get_transport_class() + available_transports = [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsRestTransport, + ] + assert transport in available_transports + + transport = WorkstationsClient.get_transport_class("grpc") + assert transport == transports.WorkstationsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc"), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest"), + ], +) +@mock.patch.object( + WorkstationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkstationsClient) +) +@mock.patch.object( + WorkstationsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkstationsAsyncClient), +) +def test_workstations_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(WorkstationsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(WorkstationsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
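+    # "always" forces the mTLS endpoint even though no client certificate is
+    # configured, so the transport must be constructed with DEFAULT_MTLS_ENDPOINT.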
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc", "true"),
+        (
+            WorkstationsAsyncClient,
+            transports.WorkstationsGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc", "false"),
+        (
+            WorkstationsAsyncClient,
+            transports.WorkstationsGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (WorkstationsClient, transports.WorkstationsRestTransport, "rest", "true"),
+        (WorkstationsClient, transports.WorkstationsRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    WorkstationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkstationsClient)
+)
+@mock.patch.object(
+    WorkstationsAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(WorkstationsAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_workstations_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior: the endpoint is autoswitched to
+    # the default mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a
+    # client cert exists.
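+    # Matrix exercised by the blocks below, summarized:
+    #   explicit cert source + env "true"  -> DEFAULT_MTLS_ENDPOINT, cert used
+    #   explicit cert source + env "false" -> DEFAULT_ENDPOINT, no cert
+    #   ADC cert found       + env "true"  -> DEFAULT_MTLS_ENDPOINT, ADC cert
+    #   no cert available    + either env  -> DEFAULT_ENDPOINT, no cert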
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [WorkstationsClient, WorkstationsAsyncClient]) +@mock.patch.object( + WorkstationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkstationsClient) +) +@mock.patch.object( + WorkstationsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkstationsAsyncClient), +) +def test_workstations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
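+    # An explicitly configured api_endpoint is always returned unchanged; the
+    # configured cert source is honored only while
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".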
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (WorkstationsClient, transports.WorkstationsGrpcTransport, "grpc"), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest"), + ], +) +def test_workstations_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + WorkstationsClient, + transports.WorkstationsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (WorkstationsClient, transports.WorkstationsRestTransport, "rest", None), + ], +) +def test_workstations_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_workstations_client_client_options_from_dict(): + with mock.patch( + "google.cloud.workstations_v1beta.services.workstations.transports.WorkstationsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = WorkstationsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + WorkstationsClient, + transports.WorkstationsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + WorkstationsAsyncClient, + transports.WorkstationsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_workstations_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "workstations.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="workstations.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationClusterRequest, + dict, + ], +) +def test_get_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationCluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + network="network_value", + subnetwork="subnetwork_value", + control_plane_ip="control_plane_ip_value", + degraded=True, + ) + response = client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationCluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.network == "network_value" + assert response.subnetwork == "subnetwork_value" + assert response.control_plane_ip == "control_plane_ip_value" + assert response.degraded is True + + +def test_get_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
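+    # The call below passes no request at all; the client is expected to
+    # synthesize and send a default GetWorkstationClusterRequest().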
+ with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + client.get_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.GetWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationCluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + network="network_value", + subnetwork="subnetwork_value", + control_plane_ip="control_plane_ip_value", + degraded=True, + ) + ) + response = await client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationCluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.network == "network_value" + assert response.subnetwork == "subnetwork_value" + assert response.control_plane_ip == "control_plane_ip_value" + assert response.degraded is True + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_async_from_dict(): + await test_get_workstation_cluster_async(request_type=dict) + + +def test_get_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GetWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + call.return_value = workstations.WorkstationCluster() + client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = workstations.GetWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationCluster() + ) + await client.get_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_workstation_cluster_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationCluster() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workstation_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_workstation_cluster_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_cluster( + workstations.GetWorkstationClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationCluster() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationCluster() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_workstation_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workstation_cluster_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_workstation_cluster( + workstations.GetWorkstationClusterRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationClustersRequest, + dict, + ], +) +def test_list_workstation_clusters(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstation_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + client.list_workstation_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationClustersRequest() + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_async( + transport: str = "grpc_asyncio", + request_type=workstations.ListWorkstationClustersRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationClustersRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkstationClustersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_async_from_dict(): + await test_list_workstation_clusters_async(request_type=dict) + + +def test_list_workstation_clusters_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + call.return_value = workstations.ListWorkstationClustersResponse() + client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workstation_clusters_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationClustersResponse() + ) + await client.list_workstation_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_workstation_clusters_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_clusters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_workstation_clusters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_workstation_clusters_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_workstation_clusters(
+            workstations.ListWorkstationClustersRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_clusters_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstation_clusters), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workstations.ListWorkstationClustersResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_workstation_clusters(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_clusters_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_workstation_clusters(
+            workstations.ListWorkstationClustersRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_workstation_clusters_pager(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstation_clusters), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[],
+                next_page_token="def",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_workstation_clusters(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, workstations.WorkstationCluster) for i in results)
+
+
+def test_list_workstation_clusters_pages(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstation_clusters), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
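+        # Pages of 3, 0, 1 and 2 clusters with tokens "abc", "def", "ghi" and
+        # "" (an empty token terminates paging): 6 items in total.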
+        call.side_effect = (
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[],
+                next_page_token="def",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_workstation_clusters(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_clusters_async_pager():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstation_clusters),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[],
+                next_page_token="def",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListWorkstationClustersResponse(
+                workstation_clusters=[
+                    workstations.WorkstationCluster(),
+                    workstations.WorkstationCluster(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_workstation_clusters(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, workstations.WorkstationCluster) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_clusters_async_pages():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstation_clusters),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[], + next_page_token="def", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_workstation_clusters(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationClusterRequest, + dict, + ], +) +def test_create_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + client.create_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.CreateWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
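+        # CreateWorkstationCluster is a long-running operation: the stub
+        # returns an Operation proto, which the client wraps in a future.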
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_async_from_dict(): + await test_create_workstation_cluster_async(request_type=dict) + + +def test_create_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_workstation_cluster_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_workstation_cluster( + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].workstation_cluster_id + mock_val = "workstation_cluster_id_value" + assert arg == mock_val + + +def test_create_workstation_cluster_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_workstation_cluster( + workstations.CreateWorkstationClusterRequest(), + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_workstation_cluster( + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].workstation_cluster_id + mock_val = "workstation_cluster_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_workstation_cluster_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_workstation_cluster( + workstations.CreateWorkstationClusterRequest(), + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationClusterRequest, + dict, + ], +) +def test_update_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + client.update_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.UpdateWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_async_from_dict(): + await test_update_workstation_cluster_async(request_type=dict) + + +def test_update_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationClusterRequest() + + request.workstation_cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
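+    # For Update, the routing parameter is derived from the nested resource
+    # name, i.e. "workstation_cluster.name=name_value".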
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_cluster.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationClusterRequest() + + request.workstation_cluster.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_cluster.name=name_value", + ) in kw["metadata"] + + +def test_update_workstation_cluster_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workstation_cluster( + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workstation_cluster_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation_cluster( + workstations.UpdateWorkstationClusterRequest(), + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_workstation_cluster( + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].workstation_cluster + mock_val = workstations.WorkstationCluster(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_workstation_cluster_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_workstation_cluster( + workstations.UpdateWorkstationClusterRequest(), + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationClusterRequest, + dict, + ], +) +def test_delete_workstation_cluster(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_workstation_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + client.delete_workstation_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_async( + transport: str = "grpc_asyncio", + request_type=workstations.DeleteWorkstationClusterRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_async_from_dict(): + await test_delete_workstation_cluster_async(request_type=dict) + + +def test_delete_workstation_cluster_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workstation_cluster_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_workstation_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
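+ # NOTE (editorial, illustrative only; not part of the generated suite):
+ # the client builds this header with gapic_v1.routing_header, roughly:
+ #   metadata += (gapic_v1.routing_header.to_grpc_metadata(
+ #       (("name", request.name),)),)
+ # which is why the ("x-goog-request-params", "name=name_value") pair is
+ # expected in the metadata captured by the mock below.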
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_delete_workstation_cluster_flattened():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_workstation_cluster), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_workstation_cluster(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_delete_workstation_cluster_flattened_error():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_workstation_cluster(
+ workstations.DeleteWorkstationClusterRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_cluster_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_workstation_cluster), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_workstation_cluster(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_cluster_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_workstation_cluster(
+ workstations.DeleteWorkstationClusterRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ workstations.GetWorkstationConfigRequest,
+ dict,
+ ],
+)
+def test_get_workstation_config(request_type, transport: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_workstation_config), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = workstations.WorkstationConfig( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + degraded=True, + enable_audit_agent=True, + ) + response = client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.degraded is True + assert response.enable_audit_agent is True + + +def test_get_workstation_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + client.get_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_get_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.GetWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationConfig( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + degraded=True, + enable_audit_agent=True, + ) + ) + response = await client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.degraded is True + assert response.enable_audit_agent is True + + +@pytest.mark.asyncio +async def test_get_workstation_config_async_from_dict(): + await test_get_workstation_config_async(request_type=dict) + + +def test_get_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
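+ # NOTE (editorial, illustrative only; not part of the generated suite):
+ # over gRPC there is no URL to carry these URI-bound fields, so they are
+ # forwarded in the "x-goog-request-params" metadata entry that the
+ # assertions further below inspect.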
+ request = workstations.GetWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + call.return_value = workstations.WorkstationConfig() + client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GetWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.WorkstationConfig() + ) + await client.get_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_workstation_config_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.WorkstationConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workstation_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_workstation_config_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_config( + workstations.GetWorkstationConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workstation_config_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ workstations.WorkstationConfig()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_workstation_config(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_workstation_config_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_workstation_config(
+ workstations.GetWorkstationConfigRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ workstations.ListWorkstationConfigsRequest,
+ dict,
+ ],
+)
+def test_list_workstation_configs(request_type, transport: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_workstation_configs), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = workstations.ListWorkstationConfigsResponse(
+ next_page_token="next_page_token_value",
+ unreachable=["unreachable_value"],
+ )
+ response = client.list_workstation_configs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == workstations.ListWorkstationConfigsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListWorkstationConfigsPager)
+ assert response.next_page_token == "next_page_token_value"
+ assert response.unreachable == ["unreachable_value"]
+
+
+def test_list_workstation_configs_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_workstation_configs), "__call__"
+ ) as call:
+ client.list_workstation_configs()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == workstations.ListWorkstationConfigsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_configs_async(
+ transport: str = "grpc_asyncio",
+ request_type=workstations.ListWorkstationConfigsRequest,
+):
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_workstation_configs_async_from_dict(): + await test_list_workstation_configs_async(request_type=dict) + + +def test_list_workstation_configs_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + call.return_value = workstations.ListWorkstationConfigsResponse() + client.list_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workstation_configs_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationConfigsResponse() + ) + await client.list_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_workstation_configs_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = workstations.ListWorkstationConfigsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_workstation_configs(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+def test_list_workstation_configs_flattened_error():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_workstation_configs(
+ workstations.ListWorkstationConfigsRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_configs_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_workstation_configs), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ workstations.ListWorkstationConfigsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_workstation_configs(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_configs_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_workstation_configs(
+ workstations.ListWorkstationConfigsRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_workstation_configs_pager(transport_name: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_workstation_configs), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
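+ # NOTE (editorial, illustrative only; not part of the generated suite):
+ # mock's side_effect consumes one element per stub invocation, so the
+ # pager sees three populated pages in order; the trailing RuntimeError is
+ # a guard that fires only if the pager requests a page beyond the final
+ # response, whose next_page_token is empty.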
+ call.side_effect = (
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[],
+ next_page_token="def",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_workstation_configs(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, workstations.WorkstationConfig) for i in results)
+
+
+def test_list_workstation_configs_pages(transport_name: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_workstation_configs), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[],
+ next_page_token="def",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_workstation_configs(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_configs_async_pager():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_workstation_configs),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[],
+ next_page_token="def",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_workstation_configs(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:  # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, workstations.WorkstationConfig) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_workstation_configs_async_pages():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_workstation_configs),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[],
+ next_page_token="def",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ workstations.ListWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (
+ await client.list_workstation_configs(request={})
+ ).pages:  # pragma: no branch
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ workstations.ListUsableWorkstationConfigsRequest,
+ dict,
+ ],
+)
+def test_list_usable_workstation_configs(request_type, transport: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = workstations.ListUsableWorkstationConfigsResponse(
+ next_page_token="next_page_token_value",
+ unreachable=["unreachable_value"],
+ )
+ response = client.list_usable_workstation_configs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == workstations.ListUsableWorkstationConfigsRequest()
+
+ # Establish that the response is the type that we expect.
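+ # NOTE (editorial, illustrative only; not part of the generated suite):
+ # the sync surface wraps the raw response in a Pager, so callers can
+ # simply iterate, e.g.:
+ #   for config in client.list_usable_workstation_configs(request={}):
+ #       ...
+ # and the pager re-issues the RPC for each next_page_token behind the
+ # scenes.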
+ assert isinstance(response, pagers.ListUsableWorkstationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstation_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + client.list_usable_workstation_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_async( + transport: str = "grpc_asyncio", + request_type=workstations.ListUsableWorkstationConfigsRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_usable_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableWorkstationConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_usable_workstation_configs_async_from_dict(): + await test_list_usable_workstation_configs_async(request_type=dict) + + +def test_list_usable_workstation_configs_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListUsableWorkstationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstation_configs), "__call__" + ) as call: + call.return_value = workstations.ListUsableWorkstationConfigsResponse() + client.list_usable_workstation_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstation_configs_field_headers_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = workstations.ListUsableWorkstationConfigsRequest()
+
+ request.parent = "parent_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ workstations.ListUsableWorkstationConfigsResponse()
+ )
+ await client.list_usable_workstation_configs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_list_usable_workstation_configs_flattened():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = workstations.ListUsableWorkstationConfigsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_usable_workstation_configs(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+def test_list_usable_workstation_configs_flattened_error():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_usable_workstation_configs(
+ workstations.ListUsableWorkstationConfigsRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstation_configs_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ workstations.ListUsableWorkstationConfigsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_usable_workstation_configs(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstation_configs_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_usable_workstation_configs(
+ workstations.ListUsableWorkstationConfigsRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_usable_workstation_configs_pager(transport_name: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[],
+ next_page_token="def",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_usable_workstation_configs(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, workstations.WorkstationConfig) for i in results)
+
+
+def test_list_usable_workstation_configs_pages(transport_name: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[],
+ next_page_token="def",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_usable_workstation_configs(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstation_configs_async_pager():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[],
+ next_page_token="def",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ workstations.ListUsableWorkstationConfigsResponse(
+ workstation_configs=[
+ workstations.WorkstationConfig(),
+ workstations.WorkstationConfig(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_usable_workstation_configs(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:  # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, workstations.WorkstationConfig) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstation_configs_async_pages():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_usable_workstation_configs),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_usable_workstation_configs(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationConfigRequest, + dict, + ], +) +def test_create_workstation_config(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_workstation_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + client.create_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_create_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.CreateWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
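+ # NOTE (editorial, illustrative only; not part of the generated suite):
+ # grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an awaitable
+ # call object, so the mocked async stub behaves like a real grpc.aio
+ # unary-unary call; a bare message here would make the client's `await`
+ # fail.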
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workstation_config_async_from_dict(): + await test_create_workstation_config_async(request_type=dict) + + +def test_create_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_workstation_config_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_workstation_config( + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].workstation_config
+ mock_val = workstations.WorkstationConfig(name="name_value")
+ assert arg == mock_val
+ arg = args[0].workstation_config_id
+ mock_val = "workstation_config_id_value"
+ assert arg == mock_val
+
+
+def test_create_workstation_config_flattened_error():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_workstation_config(
+ workstations.CreateWorkstationConfigRequest(),
+ parent="parent_value",
+ workstation_config=workstations.WorkstationConfig(name="name_value"),
+ workstation_config_id="workstation_config_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_workstation_config_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_workstation_config), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_workstation_config(
+ parent="parent_value",
+ workstation_config=workstations.WorkstationConfig(name="name_value"),
+ workstation_config_id="workstation_config_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].workstation_config
+ mock_val = workstations.WorkstationConfig(name="name_value")
+ assert arg == mock_val
+ arg = args[0].workstation_config_id
+ mock_val = "workstation_config_id_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_workstation_config_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_workstation_config(
+ workstations.CreateWorkstationConfigRequest(),
+ parent="parent_value",
+ workstation_config=workstations.WorkstationConfig(name="name_value"),
+ workstation_config_id="workstation_config_id_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ workstations.UpdateWorkstationConfigRequest,
+ dict,
+ ],
+)
+def test_update_workstation_config(request_type, transport: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_workstation_config), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_workstation_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + client.update_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_update_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.UpdateWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_workstation_config_async_from_dict(): + await test_update_workstation_config_async(request_type=dict) + + +def test_update_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationConfigRequest() + + request.workstation_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
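+ # NOTE (editorial, illustrative only; not part of the generated suite):
+ # for a nested resource field the routing key is the full field path,
+ # hence "workstation_config.name=name_value" below rather than the bare
+ # "name=name_value" used by the Get/Delete requests.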
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationConfigRequest() + + request.workstation_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation_config.name=name_value", + ) in kw["metadata"] + + +def test_update_workstation_config_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workstation_config( + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation_config + mock_val = workstations.WorkstationConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workstation_config_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation_config( + workstations.UpdateWorkstationConfigRequest(), + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workstation_config_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_workstation_config(
+            workstation_config=workstations.WorkstationConfig(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].workstation_config
+        mock_val = workstations.WorkstationConfig(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_workstation_config_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_workstation_config(
+            workstations.UpdateWorkstationConfigRequest(),
+            workstation_config=workstations.WorkstationConfig(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workstations.DeleteWorkstationConfigRequest,
+        dict,
+    ],
+)
+def test_delete_workstation_config(request_type, transport: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workstation_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.delete_workstation_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == workstations.DeleteWorkstationConfigRequest()
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_delete_workstation_config_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
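+    # Calling the wrapped method with no arguments should make the client
+    # synthesize an empty DeleteWorkstationConfigRequest, which is what the
+    # args[0] assertion below verifies.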
+ with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + client.delete_workstation_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationConfigRequest() + + +@pytest.mark.asyncio +async def test_delete_workstation_config_async( + transport: str = "grpc_asyncio", + request_type=workstations.DeleteWorkstationConfigRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_workstation_config_async_from_dict(): + await test_delete_workstation_config_async(request_type=dict) + + +def test_delete_workstation_config_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workstation_config_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_workstation_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_workstation_config_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workstation_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_workstation_config(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_workstation_config_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_workstation_config(
+            workstations.DeleteWorkstationConfigRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_config_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workstation_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_workstation_config(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_config_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_workstation_config(
+            workstations.DeleteWorkstationConfigRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workstations.GetWorkstationRequest,
+        dict,
+    ],
+)
+def test_get_workstation(request_type, transport: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workstation), "__call__") as call:
+        # Designate an appropriate return value for the call.
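+        # For a plain unary method the stub returns the response message
+        # itself; contrast the LRO tests above, where the stub returns an
+        # operations_pb2.Operation and the client surfaces a future.Future.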
+ call.return_value = workstations.Workstation( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + state=workstations.Workstation.State.STATE_STARTING, + host="host_value", + ) + response = client.get_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.Workstation) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.state == workstations.Workstation.State.STATE_STARTING + assert response.host == "host_value" + + +def test_get_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + client.get_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationRequest() + + +@pytest.mark.asyncio +async def test_get_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.GetWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.Workstation( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + state=workstations.Workstation.State.STATE_STARTING, + host="host_value", + ) + ) + response = await client.get_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GetWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.Workstation) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.state == workstations.Workstation.State.STATE_STARTING + assert response.host == "host_value" + + +@pytest.mark.asyncio +async def test_get_workstation_async_from_dict(): + await test_get_workstation_async(request_type=dict) + + +def test_get_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = workstations.GetWorkstationRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workstation), "__call__") as call:
+        call.return_value = workstations.Workstation()
+        client.get_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_workstation_field_headers_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = workstations.GetWorkstationRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workstation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workstations.Workstation()
+        )
+        await client.get_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_workstation_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workstation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = workstations.Workstation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_workstation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_workstation_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_workstation(
+            workstations.GetWorkstationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_workstation_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workstation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workstations.Workstation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
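+        # The flattened keyword is presumably copied into a
+        # GetWorkstationRequest under the hood, which is why the assertions
+        # below can read it back from args[0] of the recorded call.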
+ response = await client.get_workstation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workstation_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_workstation( + workstations.GetWorkstationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationsRequest, + dict, + ], +) +def test_list_workstations(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + client.list_workstations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationsRequest() + + +@pytest.mark.asyncio +async def test_list_workstations_async( + transport: str = "grpc_asyncio", request_type=workstations.ListWorkstationsRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_workstations_async_from_dict(): + await test_list_workstations_async(request_type=dict) + + +def test_list_workstations_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + call.return_value = workstations.ListWorkstationsResponse() + client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workstations_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListWorkstationsResponse() + ) + await client.list_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_workstations_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListWorkstationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_workstations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_workstations_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_workstations(
+            workstations.ListWorkstationsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_workstations_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstations), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workstations.ListWorkstationsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_workstations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_workstations_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_workstations(
+            workstations.ListWorkstationsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_workstations_pager(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_workstations(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, workstations.Workstation) for i in results)
+
+
+def test_list_workstations_pages(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
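+    # Each element of the call.side_effect tuple below is consumed by one
+    # page fetch; the trailing RuntimeError is a sentinel that would surface
+    # if the pager ever requested more pages than the test supplies.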
+    with mock.patch.object(
+        type(client.transport.list_workstations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_workstations(request={}).pages)
+    for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+        assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_workstations_async_pager():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstations),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_workstations(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, workstations.Workstation) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_workstations_async_pages():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workstations),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
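+        # Iterating .pages yields whole ListWorkstationsResponse messages,
+        # one per underlying RPC, rather than individual workstations, so the
+        # test can compare each page's raw next_page_token directly.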
+ call.side_effect = ( + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_workstations(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListUsableWorkstationsRequest, + dict, + ], +) +def test_list_usable_workstations(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListUsableWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + client.list_usable_workstations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationsRequest() + + +@pytest.mark.asyncio +async def test_list_usable_workstations_async( + transport: str = "grpc_asyncio", + request_type=workstations.ListUsableWorkstationsRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.ListUsableWorkstationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableWorkstationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_usable_workstations_async_from_dict(): + await test_list_usable_workstations_async(request_type=dict) + + +def test_list_usable_workstations_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListUsableWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + call.return_value = workstations.ListUsableWorkstationsResponse() + client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_usable_workstations_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.ListUsableWorkstationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.ListUsableWorkstationsResponse() + ) + await client.list_usable_workstations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_usable_workstations_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_workstations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.ListUsableWorkstationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.list_usable_workstations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_usable_workstations_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_usable_workstations(
+            workstations.ListUsableWorkstationsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workstations.ListUsableWorkstationsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_usable_workstations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_usable_workstations(
+            workstations.ListUsableWorkstationsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_usable_workstations_pager(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
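+        # Besides the pages themselves, the pager is expected to retain the
+        # per-call metadata (the routing header assembled below) so that
+        # follow-up page requests go out with the same headers; the
+        # _metadata assertion checks exactly that.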
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_usable_workstations(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, workstations.Workstation) for i in results)
+
+
+def test_list_usable_workstations_pages(transport_name: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_usable_workstations(request={}).pages)
+    for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+        assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_async_pager():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
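+        # The async pager fetches lazily: awaiting the method returns the
+        # pager once the first page is in, and each `async for` step may
+        # trigger another RPC until next_page_token comes back empty.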
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_usable_workstations(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, workstations.Workstation) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_usable_workstations_async_pages():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_workstations),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+                next_page_token="abc",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[],
+                next_page_token="def",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                ],
+                next_page_token="ghi",
+            ),
+            workstations.ListUsableWorkstationsResponse(
+                workstations=[
+                    workstations.Workstation(),
+                    workstations.Workstation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_usable_workstations(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workstations.CreateWorkstationRequest,
+        dict,
+    ],
+)
+def test_create_workstation(request_type, transport: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_workstation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == workstations.CreateWorkstationRequest()
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_create_workstation_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+ client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + client.create_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationRequest() + + +@pytest.mark.asyncio +async def test_create_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.CreateWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.CreateWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workstation_async_from_dict(): + await test_create_workstation_async(request_type=dict) + + +def test_create_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.CreateWorkstationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workstation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_workstation_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_workstation(
+            parent="parent_value",
+            workstation=workstations.Workstation(name="name_value"),
+            workstation_id="workstation_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].workstation
+        mock_val = workstations.Workstation(name="name_value")
+        assert arg == mock_val
+        arg = args[0].workstation_id
+        mock_val = "workstation_id_value"
+        assert arg == mock_val
+
+
+def test_create_workstation_flattened_error():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_workstation(
+            workstations.CreateWorkstationRequest(),
+            parent="parent_value",
+            workstation=workstations.Workstation(name="name_value"),
+            workstation_id="workstation_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_workstation_flattened_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_workstation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_workstation(
+            parent="parent_value",
+            workstation=workstations.Workstation(name="name_value"),
+            workstation_id="workstation_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].workstation
+        mock_val = workstations.Workstation(name="name_value")
+        assert arg == mock_val
+        arg = args[0].workstation_id
+        mock_val = "workstation_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_workstation_flattened_error_async():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
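+    # (Mixing a positional request message with flattened keyword arguments
+    # would be ambiguous about which value wins, so the client raises
+    # instead of attempting to merge them.)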
+ with pytest.raises(ValueError): + await client.create_workstation( + workstations.CreateWorkstationRequest(), + parent="parent_value", + workstation=workstations.Workstation(name="name_value"), + workstation_id="workstation_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationRequest, + dict, + ], +) +def test_update_workstation(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + client.update_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationRequest() + + +@pytest.mark.asyncio +async def test_update_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.UpdateWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.UpdateWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_workstation_async_from_dict(): + await test_update_workstation_async(request_type=dict) + + +def test_update_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = workstations.UpdateWorkstationRequest() + + request.workstation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.UpdateWorkstationRequest() + + request.workstation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation.name=name_value", + ) in kw["metadata"] + + +def test_update_workstation_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workstation( + workstation=workstations.Workstation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation + mock_val = workstations.Workstation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workstation_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation( + workstations.UpdateWorkstationRequest(), + workstation=workstations.Workstation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workstation_flattened_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.update_workstation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_workstation(
+ workstation=workstations.Workstation(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].workstation
+ mock_val = workstations.Workstation(name="name_value")
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_workstation_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_workstation(
+ workstations.UpdateWorkstationRequest(),
+ workstation=workstations.Workstation(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ workstations.DeleteWorkstationRequest,
+ dict,
+ ],
+)
+def test_delete_workstation(request_type, transport: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_workstation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+ response = client.delete_workstation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == workstations.DeleteWorkstationRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_workstation_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
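+ # NOTE (illustrative, mirrors the flattened_error tests above): flattened
+ # keyword arguments are sugar for building the request message, so mixing
+ # them with an explicit request object is ambiguous and raises ValueError:
+ #
+ #   client.delete_workstation(name="name_value")           # builds request
+ #   client.delete_workstation(request, name="name_value")  # ValueError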
+ with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + client.delete_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationRequest() + + +@pytest.mark.asyncio +async def test_delete_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.DeleteWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.DeleteWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_workstation_async_from_dict(): + await test_delete_workstation_async(request_type=dict) + + +def test_delete_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.DeleteWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workstation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
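+ # (Readability aside: each entry of call.mock_calls is a
+ # (name, args, kwargs) triple, so the unpacking below pulls the keyword
+ # arguments of the recorded call and inspects kw["metadata"].)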
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_delete_workstation_flattened():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_workstation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_workstation(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_delete_workstation_flattened_error():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_workstation(
+ workstations.DeleteWorkstationRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_workstation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_workstation(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_workstation_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_workstation(
+ workstations.DeleteWorkstationRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ workstations.StartWorkstationRequest,
+ dict,
+ ],
+)
+def test_start_workstation(request_type, transport: str = "grpc"):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.start_workstation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
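+ # NOTE: a hedged sketch of why these LRO tests assert future.Future:
+ # the stub returns a raw operations_pb2.Operation, which the client wraps
+ # in a google.api_core operation future, e.g. (illustrative only):
+ #
+ #   operation = client.start_workstation(request)
+ #   result = operation.result()  # would block until the LRO completes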
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.start_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StartWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_workstation), "__call__" + ) as call: + client.start_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StartWorkstationRequest() + + +@pytest.mark.asyncio +async def test_start_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.StartWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_workstation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.start_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StartWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_workstation_async_from_dict(): + await test_start_workstation_async(request_type=dict) + + +def test_start_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.StartWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_workstation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.start_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_workstation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = workstations.StartWorkstationRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.start_workstation), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+ await client.start_workstation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_start_workstation_flattened():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.start_workstation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.start_workstation(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_start_workstation_flattened_error():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.start_workstation(
+ workstations.StartWorkstationRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_start_workstation_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.start_workstation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.start_workstation(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_start_workstation_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.start_workstation( + workstations.StartWorkstationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.StopWorkstationRequest, + dict, + ], +) +def test_stop_workstation(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.stop_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StopWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_workstation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call: + client.stop_workstation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StopWorkstationRequest() + + +@pytest.mark.asyncio +async def test_stop_workstation_async( + transport: str = "grpc_asyncio", request_type=workstations.StopWorkstationRequest +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.stop_workstation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.StopWorkstationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_stop_workstation_async_from_dict(): + await test_stop_workstation_async(request_type=dict) + + +def test_stop_workstation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.StopWorkstationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.stop_workstation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_stop_workstation_field_headers_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = workstations.StopWorkstationRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+ await client.stop_workstation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_stop_workstation_flattened():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.stop_workstation(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_stop_workstation_flattened_error():
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.stop_workstation(
+ workstations.StopWorkstationRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_stop_workstation_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.stop_workstation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.stop_workstation(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_stop_workstation_flattened_error_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.stop_workstation( + workstations.StopWorkstationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GenerateAccessTokenRequest, + dict, + ], +) +def test_generate_access_token(request_type, transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + response = client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GenerateAccessTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.GenerateAccessTokenResponse) + assert response.access_token == "access_token_value" + + +def test_generate_access_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + client.generate_access_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GenerateAccessTokenRequest() + + +@pytest.mark.asyncio +async def test_generate_access_token_async( + transport: str = "grpc_asyncio", + request_type=workstations.GenerateAccessTokenRequest, +): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + ) + response = await client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workstations.GenerateAccessTokenRequest() + + # Establish that the response is the type that we expect. 
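+ # (Hedged aside: grpc_helpers_async.FakeUnaryUnaryCall wraps a plain
+ # message so the mocked stub can be awaited like a real async call:
+ #
+ #   call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(msg)
+ #   response = await client.generate_access_token(request)
+ #
+ # without the wrapper, awaiting the mock's plain return value would fail.)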
+ assert isinstance(response, workstations.GenerateAccessTokenResponse) + assert response.access_token == "access_token_value" + + +@pytest.mark.asyncio +async def test_generate_access_token_async_from_dict(): + await test_generate_access_token_async(request_type=dict) + + +def test_generate_access_token_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GenerateAccessTokenRequest() + + request.workstation = "workstation_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + call.return_value = workstations.GenerateAccessTokenResponse() + client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation=workstation_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_access_token_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workstations.GenerateAccessTokenRequest() + + request.workstation = "workstation_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workstations.GenerateAccessTokenResponse() + ) + await client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workstation=workstation_value", + ) in kw["metadata"] + + +def test_generate_access_token_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workstations.GenerateAccessTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_access_token( + workstation="workstation_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workstation + mock_val = "workstation_value" + assert arg == mock_val + + +def test_generate_access_token_flattened_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.generate_access_token(
+ workstations.GenerateAccessTokenRequest(),
+ workstation="workstation_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_generate_access_token_flattened_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.generate_access_token), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ workstations.GenerateAccessTokenResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.generate_access_token(
+ workstation="workstation_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].workstation
+ mock_val = "workstation_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_generate_access_token_flattened_error_async():
+ client = WorkstationsAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.generate_access_token(
+ workstations.GenerateAccessTokenRequest(),
+ workstation="workstation_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ workstations.GetWorkstationClusterRequest,
+ dict,
+ ],
+)
+def test_get_workstation_cluster_rest(request_type):
+ client = WorkstationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/workstationClusters/sample3"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = workstations.WorkstationCluster(
+ name="name_value",
+ display_name="display_name_value",
+ uid="uid_value",
+ reconciling=True,
+ etag="etag_value",
+ network="network_value",
+ subnetwork="subnetwork_value",
+ control_plane_ip="control_plane_ip_value",
+ degraded=True,
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = workstations.WorkstationCluster.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.get_workstation_cluster(request)
+
+ # Establish that the response is the type that we expect.
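+ # NOTE: a sketch of the REST mocking pattern used above (hedged): the
+ # expected proto is serialized to JSON and planted on a requests.Response,
+ # so the client's normal deserialization path runs end to end:
+ #
+ #   pb = workstations.WorkstationCluster.pb(return_value)
+ #   response_value._content = json_format.MessageToJson(pb).encode("UTF-8")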
+ assert isinstance(response, workstations.WorkstationCluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.network == "network_value" + assert response.subnetwork == "subnetwork_value" + assert response.control_plane_ip == "control_plane_ip_value" + assert response.degraded is True + + +def test_get_workstation_cluster_rest_required_fields( + request_type=workstations.GetWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationCluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
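+ # (Hedged illustration of what transcode() would normally derive from the
+ # google.api.http annotation, which the stubbed result below replaces:
+ #
+ #   {"uri": "/v1beta/projects/sample1/locations/sample2/workstationClusters/sample3",
+ #    "method": "get", "query_params": {...}}
+ # )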
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.WorkstationCluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workstation_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_get_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_get_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GetWorkstationClusterRequest.pb( + workstations.GetWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.WorkstationCluster.to_json( + workstations.WorkstationCluster() + ) + + request = workstations.GetWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.WorkstationCluster() + + client.get_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.GetWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
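+ # (Hedged note: google.api_core maps HTTP status codes to typed
+ # exceptions, e.g. 400 -> BadRequest, 403 -> PermissionDenied,
+ # 404 -> NotFound, which is why the mocked 400 below is expected to
+ # surface as core_exceptions.BadRequest.)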
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workstation_cluster(request) + + +def test_get_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationCluster() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.WorkstationCluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_cluster( + workstations.GetWorkstationClusterRequest(), + name="name_value", + ) + + +def test_get_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationClustersRequest, + dict, + ], +) +def test_list_workstation_clusters_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = workstations.ListWorkstationClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workstation_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstation_clusters_rest_required_fields( + request_type=workstations.ListWorkstationClustersRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListWorkstationClustersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workstation_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workstation_clusters_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workstation_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workstation_clusters_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_workstation_clusters" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_workstation_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListWorkstationClustersRequest.pb( + workstations.ListWorkstationClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + workstations.ListWorkstationClustersResponse.to_json( + workstations.ListWorkstationClustersResponse() + ) + ) + + request = workstations.ListWorkstationClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListWorkstationClustersResponse() + + client.list_workstation_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workstation_clusters_rest_bad_request( + transport: str = "rest", request_type=workstations.ListWorkstationClustersRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
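+ # (Hedged reading of the set arithmetic in the _unset_required_fields test
+ # above: the default-valued query params ("pageSize", "pageToken") are
+ # intersected with the required fields ("parent",); the empty result means
+ # no required field is ever dropped for carrying a default value.)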
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workstation_clusters(request) + + +def test_list_workstation_clusters_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workstation_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/workstationClusters" + % client.transport._host, + args[1], + ) + + +def test_list_workstation_clusters_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstation_clusters( + workstations.ListWorkstationClustersRequest(), + parent="parent_value", + ) + + +def test_list_workstation_clusters_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
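+ # NOTE: a hedged sketch of the paging contract exercised below: each
+ # response carries next_page_token, and the pager keeps issuing requests
+ # until the token is empty, so queueing one Response per page via
+ # req.side_effect lets a single iteration walk all pages:
+ #
+ #   req.side_effect = return_values
+ #   results = list(client.list_workstation_clusters(request=sample_request))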
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[], + next_page_token="def", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationClustersResponse( + workstation_clusters=[ + workstations.WorkstationCluster(), + workstations.WorkstationCluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListWorkstationClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_workstation_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationCluster) for i in results) + + pages = list(client.list_workstation_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationClusterRequest, + dict, + ], +) +def test_create_workstation_cluster_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["workstation_cluster"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
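+ # (Hedged note: over REST the raw operations_pb2.Operation comes back as
+ # JSON and is re-wrapped by the client, so this test checks
+ # response.operation.name instead of polling a future:
+ #
+ #   json_return_value = json_format.MessageToJson(return_value)
+ # )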
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workstation_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_workstation_cluster_rest_required_fields( + request_type=workstations.CreateWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["workstation_cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "workstationClusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workstationClusterId" in jsonified_request + assert ( + jsonified_request["workstationClusterId"] + == request_init["workstation_cluster_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["workstationClusterId"] = "workstation_cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "validate_only", + "workstation_cluster_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "workstationClusterId" in jsonified_request + assert jsonified_request["workstationClusterId"] == "workstation_cluster_id_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_workstation_cluster(request) + + expected_params = [ + ( + "workstationClusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "validateOnly", + "workstationClusterId", + ) + ) + & set( + ( + "parent", + "workstationClusterId", + "workstationCluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_create_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_create_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.CreateWorkstationClusterRequest.pb( + workstations.CreateWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.CreateWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.CreateWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["workstation_cluster"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": 
"etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workstation_cluster(request) + + +def test_create_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/workstationClusters" + % client.transport._host, + args[1], + ) + + +def test_create_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_workstation_cluster( + workstations.CreateWorkstationClusterRequest(), + parent="parent_value", + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + workstation_cluster_id="workstation_cluster_id_value", + ) + + +def test_create_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationClusterRequest, + dict, + ], +) +def test_update_workstation_cluster_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_cluster": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + } + request_init["workstation_cluster"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workstation_cluster(request) + + # Establish that the response is the type that we expect. 
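+    # (For this long-running method the client returns a google.api_core
+    # operation future; its .operation property exposes the raw Operation
+    # proto, hence the name check below.)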
+ assert response.operation.name == "operations/spam" + + +def test_update_workstation_cluster_rest_required_fields( + request_type=workstations.UpdateWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
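+            # (This PATCH method has no required string fields to pre-seed,
+            # which is why request_init above stays empty; the body is attached
+            # to the transcode result below.)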
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_workstation_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "workstationCluster", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_update_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_update_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.UpdateWorkstationClusterRequest.pb( + workstations.UpdateWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.UpdateWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.UpdateWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_cluster": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + } + request_init["workstation_cluster"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + 
"update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "control_plane_ip": "control_plane_ip_value", + "private_cluster_config": { + "enable_private_endpoint": True, + "cluster_hostname": "cluster_hostname_value", + "service_attachment_uri": "service_attachment_uri_value", + "allowed_projects": ["allowed_projects_value1", "allowed_projects_value2"], + }, + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_workstation_cluster(request) + + +def test_update_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "workstation_cluster": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{workstation_cluster.name=projects/*/locations/*/workstationClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_update_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_workstation_cluster( + workstations.UpdateWorkstationClusterRequest(), + workstation_cluster=workstations.WorkstationCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationClusterRequest, + dict, + ], +) +def test_delete_workstation_cluster_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workstation_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_workstation_cluster_rest_required_fields( + request_type=workstations.DeleteWorkstationClusterRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "force", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
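+    # Patching requests.Session.request stubs the wire call itself, so nothing
+    # leaves the process and the response is served from the canned object below.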
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workstation_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workstation_cluster_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workstation_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workstation_cluster_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_delete_workstation_cluster" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_delete_workstation_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.DeleteWorkstationClusterRequest.pb( + workstations.DeleteWorkstationClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.DeleteWorkstationClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_workstation_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_workstation_cluster_rest_bad_request( + transport: str = "rest", request_type=workstations.DeleteWorkstationClusterRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workstation_cluster(request) + + +def test_delete_workstation_cluster_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workstation_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workstation_cluster_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workstation_cluster( + workstations.DeleteWorkstationClusterRequest(), + name="name_value", + ) + + +def test_delete_workstation_cluster_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationConfigRequest, + dict, + ], +) +def test_get_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
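+        # (The proto built here is round-tripped through MessageToJson into
+        # Response._content below, so the client's real deserialization path
+        # produces the object the assertions run against.)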
+ return_value = workstations.WorkstationConfig( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + degraded=True, + enable_audit_agent=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.WorkstationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_workstation_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.WorkstationConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.degraded is True + assert response.enable_audit_agent is True + + +def test_get_workstation_config_rest_required_fields( + request_type=workstations.GetWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
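+            # (Unlike the POST/PATCH variants above, no body is attached for
+            # this GET method, so set fields can only surface as query params.)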
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.WorkstationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workstation_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_get_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_get_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GetWorkstationConfigRequest.pb( + workstations.GetWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.WorkstationConfig.to_json( + workstations.WorkstationConfig() + ) + + request = workstations.GetWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.WorkstationConfig() + + client.get_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.GetWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
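+    # google.api_core maps HTTP status codes onto exception classes, so the
+    # canned 400 below surfaces as core_exceptions.BadRequest without a server.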
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workstation_config(request) + + +def test_get_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.WorkstationConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.WorkstationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation_config( + workstations.GetWorkstationConfigRequest(), + name="name_value", + ) + + +def test_get_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationConfigsRequest, + dict, + ], +) +def test_list_workstation_configs_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
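+        # (unreachable is the standard partial-results field from AIP-217; the
+        # test only verifies that it round-trips through the REST layer.)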
+ return_value = workstations.ListWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workstation_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstation_configs_rest_required_fields( + request_type=workstations.ListWorkstationConfigsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstation_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workstation_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workstation_configs_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workstation_configs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workstation_configs_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_workstation_configs" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_workstation_configs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListWorkstationConfigsRequest.pb( + workstations.ListWorkstationConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.ListWorkstationConfigsResponse.to_json( + workstations.ListWorkstationConfigsResponse() + ) + + request = workstations.ListWorkstationConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListWorkstationConfigsResponse() + + client.list_workstation_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workstation_configs_rest_bad_request( + transport: str = "rest", request_type=workstations.ListWorkstationConfigsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workstation_configs(request) + + +def test_list_workstation_configs_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workstation_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_workstation_configs_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstation_configs( + workstations.ListWorkstationConfigsRequest(), + parent="parent_value", + ) + + +def test_list_workstation_configs_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
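+        # The pager makes one HTTP call per page, so req.side_effect below
+        # queues the four canned pages; the sequence is doubled because the
+        # test iterates twice, once for items and once for pages.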
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListWorkstationConfigsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + pager = client.list_workstation_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in results) + + pages = list(client.list_workstation_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListUsableWorkstationConfigsRequest, + dict, + ], +) +def test_list_usable_workstation_configs_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_usable_workstation_configs(request) + + # Establish that the response is the type that we expect. 
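+    # (The pager proxies attribute access to the underlying first-page
+    # response, which is why next_page_token and unreachable read through.)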
+ assert isinstance(response, pagers.ListUsableWorkstationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstation_configs_rest_required_fields( + request_type=workstations.ListUsableWorkstationConfigsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstation_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstation_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListUsableWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_usable_workstation_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_usable_workstation_configs_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_usable_workstation_configs._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_workstation_configs_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_usable_workstation_configs" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_usable_workstation_configs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListUsableWorkstationConfigsRequest.pb( + workstations.ListUsableWorkstationConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + workstations.ListUsableWorkstationConfigsResponse.to_json( + workstations.ListUsableWorkstationConfigsResponse() + ) + ) + + request = workstations.ListUsableWorkstationConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListUsableWorkstationConfigsResponse() + + client.list_usable_workstation_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_usable_workstation_configs_rest_bad_request( + transport: str = "rest", + request_type=workstations.ListUsableWorkstationConfigsRequest, +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_usable_workstation_configs(request) + + +def test_list_usable_workstation_configs_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_usable_workstation_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs:listUsable" + % client.transport._host, + args[1], + ) + + +def test_list_usable_workstation_configs_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_usable_workstation_configs( + workstations.ListUsableWorkstationConfigsRequest(), + parent="parent_value", + ) + + +def test_list_usable_workstation_configs_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationConfigsResponse( + workstation_configs=[ + workstations.WorkstationConfig(), + workstations.WorkstationConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListUsableWorkstationConfigsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + pager = client.list_usable_workstation_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.WorkstationConfig) for i in results) + + pages = list( + client.list_usable_workstation_configs(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationConfigRequest, + dict, + ], +) +def test_create_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request_init["workstation_config"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + "accelerators": [{"type_": "type__value", "count": 553}], + } + }, + "persistent_directories": [ + { + "mount_path": "mount_path_value", + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": "source_snapshot_value", + "reclaim_policy": 1, + }, + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + 
"encryption_key": { + "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "readiness_checks": [{"path": "path_value", "port": 453}], + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + "enable_audit_agent": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workstation_config(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_workstation_config_rest_required_fields( + request_type=workstations.CreateWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["workstation_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "workstationConfigId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workstationConfigId" in jsonified_request + assert ( + jsonified_request["workstationConfigId"] + == request_init["workstation_config_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["workstationConfigId"] = "workstation_config_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "validate_only", + "workstation_config_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "workstationConfigId" in jsonified_request + assert jsonified_request["workstationConfigId"] == "workstation_config_id_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_workstation_config(request) + + expected_params = [ + ( + "workstationConfigId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "validateOnly", + "workstationConfigId", + ) + ) + & set( + ( + "parent", + "workstationConfigId", + "workstationConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_create_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_create_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.CreateWorkstationConfigRequest.pb( + workstations.CreateWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.CreateWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.CreateWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request 
that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + request_init["workstation_config"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + "accelerators": [{"type_": "type__value", "count": 553}], + } + }, + "persistent_directories": [ + { + "mount_path": "mount_path_value", + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": "source_snapshot_value", + "reclaim_policy": 1, + }, + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + "encryption_key": { + "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "readiness_checks": [{"path": "path_value", "port": 453}], + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + "enable_audit_agent": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workstation_config(request) + + +def test_create_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
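+
+        # (illustrative) the bad-request test above relies on the transport
+        # routing non-2xx responses through
+        # google.api_core.exceptions.from_http_response, which maps HTTP 400
+        # to BadRequest. The same mapping, checked directly (the error body
+        # below is a made-up example):
+        bad_response = Response()
+        bad_response.status_code = 400
+        bad_response.request = Request()
+        bad_response._content = b'{"error": {"message": "bad"}}'
+        assert isinstance(
+            core_exceptions.from_http_response(bad_response),
+            core_exceptions.BadRequest,
+        )
+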
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/workstationClusters/*}/workstationConfigs" + % client.transport._host, + args[1], + ) + + +def test_create_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_workstation_config( + workstations.CreateWorkstationConfigRequest(), + parent="parent_value", + workstation_config=workstations.WorkstationConfig(name="name_value"), + workstation_config_id="workstation_config_id_value", + ) + + +def test_create_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationConfigRequest, + dict, + ], +) +def test_update_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_config": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + } + request_init["workstation_config"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + "accelerators": [{"type_": "type__value", "count": 553}], + } + }, + "persistent_directories": [ + { + "mount_path": "mount_path_value", + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": 
"source_snapshot_value", + "reclaim_policy": 1, + }, + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + "encryption_key": { + "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "readiness_checks": [{"path": "path_value", "port": 453}], + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + "enable_audit_agent": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workstation_config(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_workstation_config_rest_required_fields( + request_type=workstations.UpdateWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_workstation_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "workstationConfig", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_update_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_update_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.UpdateWorkstationConfigRequest.pb( + workstations.UpdateWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.UpdateWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.UpdateWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation_config": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + } + request_init["workstation_config"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": 
{"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "idle_timeout": {"seconds": 751, "nanos": 543}, + "running_timeout": {}, + "host": { + "gce_instance": { + "machine_type": "machine_type_value", + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "pool_size": 980, + "pooled_instances": 1706, + "disable_public_ip_addresses": True, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + "boot_disk_size_gb": 1792, + "accelerators": [{"type_": "type__value", "count": 553}], + } + }, + "persistent_directories": [ + { + "mount_path": "mount_path_value", + "gce_pd": { + "size_gb": 739, + "fs_type": "fs_type_value", + "disk_type": "disk_type_value", + "source_snapshot": "source_snapshot_value", + "reclaim_policy": 1, + }, + } + ], + "container": { + "image": "image_value", + "command": ["command_value1", "command_value2"], + "args": ["args_value1", "args_value2"], + "env": {}, + "working_dir": "working_dir_value", + "run_as_user": 1190, + }, + "encryption_key": { + "kms_key": "kms_key_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "readiness_checks": [{"path": "path_value", "port": 453}], + "degraded": True, + "conditions": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + "enable_audit_agent": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_workstation_config(request) + + +def test_update_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "workstation_config": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{workstation_config.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workstation_config( + workstations.UpdateWorkstationConfigRequest(), + workstation_config=workstations.WorkstationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationConfigRequest, + dict, + ], +) +def test_delete_workstation_config_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workstation_config(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_workstation_config_rest_required_fields( + request_type=workstations.DeleteWorkstationConfigRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
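+
+    # (illustrative) a request object and flattened arguments are mutually
+    # exclusive on every client method; passing both raises ValueError before
+    # any HTTP traffic happens, which is why the *_flattened_error tests
+    # below never need to mock the session:
+    probe_client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+    with pytest.raises(ValueError):
+        probe_client.delete_workstation_config(
+            workstations.DeleteWorkstationConfigRequest(), name="name_value"
+        )
+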
+ assert not set(unset_fields) - set( + ( + "etag", + "force", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workstation_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workstation_config_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workstation_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workstation_config_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_delete_workstation_config" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_delete_workstation_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.DeleteWorkstationConfigRequest.pb( + workstations.DeleteWorkstationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.DeleteWorkstationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + 
post.return_value = operations_pb2.Operation() + + client.delete_workstation_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_workstation_config_rest_bad_request( + transport: str = "rest", request_type=workstations.DeleteWorkstationConfigRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workstation_config(request) + + +def test_delete_workstation_config_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workstation_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workstation_config_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_workstation_config( + workstations.DeleteWorkstationConfigRequest(), + name="name_value", + ) + + +def test_delete_workstation_config_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GetWorkstationRequest, + dict, + ], +) +def test_get_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.Workstation( + name="name_value", + display_name="display_name_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + state=workstations.Workstation.State.STATE_STARTING, + host="host_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.Workstation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_workstation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.Workstation) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.state == workstations.Workstation.State.STATE_STARTING + assert response.host == "host_value" + + +def test_get_workstation_rest_required_fields( + request_type=workstations.GetWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
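+
+    # (illustrative) workstations.Workstation is a proto-plus wrapper, which
+    # is why the tests unwrap it with Workstation.pb(...) before handing it
+    # to json_format. A quick round-trip through the same machinery:
+    ws = workstations.Workstation(name="name_value")
+    ws_json = json_format.MessageToJson(workstations.Workstation.pb(ws))
+    assert workstations.Workstation.from_json(ws_json).name == "name_value"
+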
+ return_value = workstations.Workstation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.Workstation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workstation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_get_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_get_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GetWorkstationRequest.pb( + workstations.GetWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.Workstation.to_json( + workstations.Workstation() + ) + + request = workstations.GetWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.Workstation() + + client.get_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.GetWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workstation(request) + + +def test_get_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.Workstation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.Workstation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workstation( + workstations.GetWorkstationRequest(), + name="name_value", + ) + + +def test_get_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListWorkstationsRequest, + dict, + ], +) +def test_list_workstations_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
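+
+        # (illustrative) every REST test fakes the wire the same three-line
+        # way. As a hypothetical helper (not part of this file), the pattern
+        # boils down to:
+        def make_fake_response(payload: str, status: int = 200) -> Response:
+            resp = Response()
+            resp.status_code = status
+            # requests reads .content/.text from this private slot
+            resp._content = payload.encode("UTF-8")
+            return resp
+
+        assert make_fake_response("{}").status_code == 200
+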
+ return_value = workstations.ListWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workstations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_workstations_rest_required_fields( + request_type=workstations.ListWorkstationsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workstations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workstations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workstations_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workstations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workstations_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_workstations" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_workstations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListWorkstationsRequest.pb( + workstations.ListWorkstationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.ListWorkstationsResponse.to_json( + workstations.ListWorkstationsResponse() + ) + + request = workstations.ListWorkstationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListWorkstationsResponse() + + client.list_workstations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workstations_rest_bad_request( + transport: str = "rest", request_type=workstations.ListWorkstationsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
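+
+    # (illustrative) the interceptor tests above patch the pre_/post_ hooks
+    # directly; a real caller would subclass WorkstationsRestInterceptor
+    # instead. The hook bodies below are assumptions for illustration, not
+    # generated code:
+    class LoggingInterceptor(transports.WorkstationsRestInterceptor):
+        def pre_list_workstations(self, request, metadata):
+            # runs before transcoding; may rewrite the request or metadata
+            return request, metadata
+
+        def post_list_workstations(self, response):
+            # runs after deserialization; may rewrite the response
+            return response
+
+    transports.WorkstationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=LoggingInterceptor(),
+    )
+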
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workstations(request) + + +def test_list_workstations_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListWorkstationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workstations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations" + % client.transport._host, + args[1], + ) + + +def test_list_workstations_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workstations( + workstations.ListWorkstationsRequest(), + parent="parent_value", + ) + + +def test_list_workstations_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListWorkstationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + pager = client.list_workstations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.Workstation) for i in results) + + pages = list(client.list_workstations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.ListUsableWorkstationsRequest, + dict, + ], +) +def test_list_usable_workstations_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_usable_workstations(request) + + # Establish that the response is the type that we expect. 
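+
+    # (illustrative) the pager test above works because a pager is only
+    # "call, read next_page_token, call again": it holds the bound method
+    # plus the first response and keeps re-invoking until the token is
+    # empty. Driving one by hand with a stub method instead of a transport:
+    first = workstations.ListWorkstationsResponse(
+        workstations=[workstations.Workstation()],
+        next_page_token="abc",
+    )
+    last = workstations.ListWorkstationsResponse(
+        workstations=[workstations.Workstation()],
+    )
+    hand_pager = pagers.ListWorkstationsPager(
+        method=lambda request, metadata=(): last,
+        request=workstations.ListWorkstationsRequest(),
+        response=first,
+    )
+    assert len(list(hand_pager)) == 2
+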
+ assert isinstance(response, pagers.ListUsableWorkstationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_usable_workstations_rest_required_fields( + request_type=workstations.ListUsableWorkstationsRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable_workstations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
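`path_template.transcode` normally matches a request against the method's `http_options` and splits it into a URI, an HTTP verb, a body, and query parameters; the `_required_fields` tests stub it so that every field surfaces in `query_params`. A much-simplified, hypothetical version of that matching step:

```python
import re


def toy_transcode(http_options, request):
    # Hypothetical stand-in for path_template.transcode: pick the first
    # option whose URI fields are all present, splice them into the path,
    # and leave the rest of the request as query parameters.
    for option in http_options:
        fields = re.findall(r"\{(\w+)\}", option["uri"])
        if all(f in request for f in fields):
            return {
                "uri": option["uri"].format(**request),
                "method": option["method"],
                "query_params": {k: v for k, v in request.items() if k not in fields},
            }
    raise ValueError("request does not match any http option")


result = toy_transcode(
    [{"uri": "/v1beta/{parent}/workstations", "method": "get"}],
    {"parent": "projects/p/locations/l", "page_size": 5},
)
assert result["uri"] == "/v1beta/projects/p/locations/l/workstations"
assert result["query_params"] == {"page_size": 5}
```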
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.ListUsableWorkstationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_usable_workstations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_usable_workstations_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_usable_workstations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_workstations_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_list_usable_workstations" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_list_usable_workstations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.ListUsableWorkstationsRequest.pb( + workstations.ListUsableWorkstationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.ListUsableWorkstationsResponse.to_json( + workstations.ListUsableWorkstationsResponse() + ) + + request = workstations.ListUsableWorkstationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.ListUsableWorkstationsResponse() + + client.list_usable_workstations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_usable_workstations_rest_bad_request( + transport: str = "rest", request_type=workstations.ListUsableWorkstationsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
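The `_interceptors` test above only checks that the `pre_*` and `post_*` hooks fire once around the call. The contract they follow is small: `pre` may rewrite the request and metadata before transcoding, `post` may rewrite the deserialized response. A minimal sketch with illustrative names:

```python
class TracingInterceptor:
    """Illustrative interceptor; the method names mirror the generated hooks."""

    def pre_list_usable_workstations(self, request, metadata):
        # Runs before the HTTP call; must hand back the (request, metadata) pair.
        return request, list(metadata) + [("x-trace", "on")]

    def post_list_usable_workstations(self, response):
        # Runs after deserialization; must hand back the (possibly new) response.
        return response


def call_with_interceptor(interceptor, request, metadata, send):
    request, metadata = interceptor.pre_list_usable_workstations(request, metadata)
    return interceptor.post_list_usable_workstations(send(request, metadata))


response = call_with_interceptor(
    TracingInterceptor(), {"parent": "p"}, [("key", "val")], lambda r, m: {"ok": True}
)
assert response == {"ok": True}
```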
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_usable_workstations(request) + + +def test_list_usable_workstations_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.ListUsableWorkstationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.ListUsableWorkstationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_usable_workstations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations:listUsable" + % client.transport._host, + args[1], + ) + + +def test_list_usable_workstations_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_usable_workstations( + workstations.ListUsableWorkstationsRequest(), + parent="parent_value", + ) + + +def test_list_usable_workstations_rest_pager(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
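The `_bad_request` tests rely on the transport translating an HTTP 400 into `core_exceptions.BadRequest`. `google-api-core` exposes that mapping directly as `from_http_response`, which the same kind of fake `Response` can demonstrate:

```python
from google.api_core import exceptions as core_exceptions
from requests import Response

# Build the same kind of fake 400 the tests feed into the mocked session.
response = Response()
response.status_code = 400
response._content = b'{"error": {"message": "bad parent"}}'

exc = core_exceptions.from_http_response(response)
assert isinstance(exc, core_exceptions.BadRequest)
print(exc.message)
```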
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workstations.ListUsableWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + workstations.Workstation(), + ], + next_page_token="abc", + ), + workstations.ListUsableWorkstationsResponse( + workstations=[], + next_page_token="def", + ), + workstations.ListUsableWorkstationsResponse( + workstations=[ + workstations.Workstation(), + ], + next_page_token="ghi", + ), + workstations.ListUsableWorkstationsResponse( + workstations=[ + workstations.Workstation(), + workstations.Workstation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workstations.ListUsableWorkstationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + + pager = client.list_usable_workstations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workstations.Workstation) for i in results) + + pages = list(client.list_usable_workstations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.CreateWorkstationRequest, + dict, + ], +) +def test_create_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request_init["workstation"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "state": 1, + "host": "host_value", + "env": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workstation(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_workstation_rest_required_fields( + request_type=workstations.CreateWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["workstation_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "workstationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workstationId" in jsonified_request + assert jsonified_request["workstationId"] == request_init["workstation_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["workstationId"] = "workstation_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workstation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "validate_only", + "workstation_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "workstationId" in jsonified_request + assert jsonified_request["workstationId"] == "workstation_id_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
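The surrounding `_required_fields` test serializes the request with `including_default_value_fields=False`, confirms that the empty `workstationId` is dropped, and relies on `_get_unset_required_fields` to put it back; just below, the expected query parameters include `("workstationId", "")`. A toy version of that helper — illustrative only; the real transport derives the field list from the proto definition and `http_options`:

```python
# Required query-string fields and their proto default values (hypothetical
# table; the generated transport builds this per method).
REQUIRED_QUERY_PARAMS = {"workstationId": ""}


def get_unset_required_fields(message: dict) -> dict:
    # Return the required fields that serialization dropped, with their
    # default values, so they can be re-added to the query string.
    return {
        field: default
        for field, default in REQUIRED_QUERY_PARAMS.items()
        if field not in message
    }


assert get_unset_required_fields({"parent": "p"}) == {"workstationId": ""}
assert get_unset_required_fields({"workstationId": "w1"}) == {}
```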
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_workstation(request)
+
+            expected_params = [
+                (
+                    "workstationId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_workstation_rest_unset_required_fields():
+    transport = transports.WorkstationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_workstation._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "validateOnly",
+                "workstationId",
+            )
+        )
+        & set(
+            (
+                "parent",
+                "workstationId",
+                "workstation",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_workstation_rest_interceptors(null_interceptor):
+    transport = transports.WorkstationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.WorkstationsRestInterceptor(),
+    )
+    client = WorkstationsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.WorkstationsRestInterceptor, "post_create_workstation"
+    ) as post, mock.patch.object(
+        transports.WorkstationsRestInterceptor, "pre_create_workstation"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = workstations.CreateWorkstationRequest.pb(
+            workstations.CreateWorkstationRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = workstations.CreateWorkstationRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.create_workstation(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_workstation_rest_bad_request(
+    transport: str = "rest", request_type=workstations.CreateWorkstationRequest
+):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4"
+    }
+    request_init["workstation"] = {
+        "name": "name_value",
+        "display_name": "display_name_value",
+        "uid": "uid_value",
+        "reconciling": True,
+        "annotations": {},
+        "labels": {},
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "delete_time": {},
+        "etag": "etag_value",
+        "state": 1,
+        "host": "host_value",
+        "env": {},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_workstation(request)
+
+
+def test_create_workstation_rest_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "parent": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            workstation=workstations.Workstation(name="name_value"),
+            workstation_id="workstation_id_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_workstation(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1beta/{parent=projects/*/locations/*/workstationClusters/*/workstationConfigs/*}/workstations"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_workstation_rest_flattened_error(transport: str = "rest"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
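The `_flattened_error` tests assert a `ValueError` whenever a request object and flattened fields are mixed. The guard behind them is essentially the following (the function name, parameters, and exact message are illustrative, not the generated client's code):

```python
def list_workstations(request=None, *, parent=None):
    # Flattened keyword arguments may only be used when no request object
    # was passed; mixing the two is ambiguous, so reject it outright.
    has_flattened_params = any(p is not None for p in [parent])
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    return request or {"parent": parent}


try:
    list_workstations({"parent": "p"}, parent="parent_value")
except ValueError as exc:
    print(exc)
```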
+ with pytest.raises(ValueError): + client.create_workstation( + workstations.CreateWorkstationRequest(), + parent="parent_value", + workstation=workstations.Workstation(name="name_value"), + workstation_id="workstation_id_value", + ) + + +def test_create_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.UpdateWorkstationRequest, + dict, + ], +) +def test_update_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation": { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + } + request_init["workstation"] = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5", + "display_name": "display_name_value", + "uid": "uid_value", + "reconciling": True, + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "etag": "etag_value", + "state": 1, + "host": "host_value", + "env": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workstation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_workstation_rest_required_fields( + request_type=workstations.UpdateWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workstation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_workstation(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_workstation_rest_unset_required_fields():
+    transport = transports.WorkstationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_workstation._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "allowMissing",
+                "updateMask",
+                "validateOnly",
+            )
+        )
+        & set(
+            (
+                "workstation",
+                "updateMask",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_workstation_rest_interceptors(null_interceptor):
+    transport = transports.WorkstationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.WorkstationsRestInterceptor(),
+    )
+    client = WorkstationsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.WorkstationsRestInterceptor, "post_update_workstation"
+    ) as post, mock.patch.object(
+        transports.WorkstationsRestInterceptor, "pre_update_workstation"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = workstations.UpdateWorkstationRequest.pb(
+            workstations.UpdateWorkstationRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = workstations.UpdateWorkstationRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.update_workstation(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_workstation_rest_bad_request(
+    transport: str = "rest", request_type=workstations.UpdateWorkstationRequest
+):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "workstation": {
+            "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5"
+        }
+    }
+    request_init["workstation"] = {
+        "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5",
+        "display_name": "display_name_value",
+        "uid": "uid_value",
+        "reconciling": True,
+        "annotations": {},
+        "labels": {},
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "delete_time": {},
+        "etag": "etag_value",
+        "state": 1,
+        "host": "host_value",
+        "env": {},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_workstation(request)
+
+
+def test_update_workstation_rest_flattened():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "workstation": {
+                "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5"
+            }
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            workstation=workstations.Workstation(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_workstation(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1beta/{workstation.name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_workstation_rest_flattened_error(transport: str = "rest"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
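The update tests above attach `update_mask=field_mask_pb2.FieldMask(paths=[...])` to say which workstation fields the PATCH may touch. Protobuf's well-known `FieldMask` type ships with `MergeMessage` for exactly that selection, shown here on a small well-known type rather than `Workstation`:

```python
from google.protobuf import field_mask_pb2, timestamp_pb2

src = timestamp_pb2.Timestamp(seconds=751, nanos=543)
dst = timestamp_pb2.Timestamp(seconds=1, nanos=1)

# Only fields named in the mask are copied from src into dst.
mask = field_mask_pb2.FieldMask(paths=["seconds"])
mask.MergeMessage(src, dst)

assert dst.seconds == 751  # covered by the mask, overwritten
assert dst.nanos == 1  # not in the mask, left alone
```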
+ with pytest.raises(ValueError): + client.update_workstation( + workstations.UpdateWorkstationRequest(), + workstation=workstations.Workstation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.DeleteWorkstationRequest, + dict, + ], +) +def test_delete_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workstation(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_workstation_rest_required_fields( + request_type=workstations.DeleteWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workstation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "etag", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
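The mutating methods here (`create`, `update`, `delete`, `start`, `stop`) all come back as long-running operations, so the fakes serialize a `google.longrunning` `Operation` to JSON for the response body and the assertions read back `response.operation.name`. The round-trip the fakes perform, in isolation:

```python
from google.longrunning import operations_pb2
from google.protobuf import json_format

# Build the operation, serialize it the way the tests fill _content, then
# parse it back and confirm the name survives the round-trip.
op = operations_pb2.Operation(name="operations/spam")
json_body = json_format.MessageToJson(op)

parsed = json_format.Parse(json_body, operations_pb2.Operation())
assert parsed.name == "operations/spam"
```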
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workstation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_delete_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_delete_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.DeleteWorkstationRequest.pb( + workstations.DeleteWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.DeleteWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.DeleteWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workstation(request) + + +def test_delete_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workstation( + workstations.DeleteWorkstationRequest(), + name="name_value", + ) + + +def test_delete_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.StartWorkstationRequest, + dict, + ], +) +def test_start_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_workstation(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_start_workstation_rest_required_fields( + request_type=workstations.StartWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.start_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_start_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.start_workstation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_start_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_start_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.StartWorkstationRequest.pb( + workstations.StartWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.StartWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.start_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.StartWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_workstation(request) + + +def test_start_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.start_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:start" + % client.transport._host, + args[1], + ) + + +def test_start_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_workstation( + workstations.StartWorkstationRequest(), + name="name_value", + ) + + +def test_start_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.StopWorkstationRequest, + dict, + ], +) +def test_stop_workstation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.stop_workstation(request) + + # Establish that the response is the type that we expect. 
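Every mocked call in this file wraps its payload in a real `requests.Response` by assigning the private `_content` attribute, which keeps `.text` and `.json()` working as usual. The pattern in isolation:

```python
from requests import Response

# A real Response object whose body is pre-filled; no HTTP round-trip needed.
response = Response()
response.status_code = 200
response._content = b'{"name": "operations/spam"}'

assert response.ok
assert response.json() == {"name": "operations/spam"}
```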
+ assert response.operation.name == "operations/spam" + + +def test_stop_workstation_rest_required_fields( + request_type=workstations.StopWorkstationRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_workstation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.stop_workstation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stop_workstation_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stop_workstation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_workstation_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkstationsRestInterceptor, "post_stop_workstation" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_stop_workstation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.StopWorkstationRequest.pb( + workstations.StopWorkstationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workstations.StopWorkstationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.stop_workstation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_workstation_rest_bad_request( + transport: str = "rest", request_type=workstations.StopWorkstationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_workstation(request) + + +def test_stop_workstation_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.stop_workstation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:stop" + % client.transport._host, + args[1], + ) + + +def test_stop_workstation_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stop_workstation( + workstations.StopWorkstationRequest(), + name="name_value", + ) + + +def test_stop_workstation_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workstations.GenerateAccessTokenRequest, + dict, + ], +) +def test_generate_access_token_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = workstations.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_access_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, workstations.GenerateAccessTokenResponse) + assert response.access_token == "access_token_value" + + +def test_generate_access_token_rest_required_fields( + request_type=workstations.GenerateAccessTokenRequest, +): + transport_class = transports.WorkstationsRestTransport + + request_init = {} + request_init["workstation"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_access_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["workstation"] = "workstation_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_access_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "workstation" in jsonified_request + assert jsonified_request["workstation"] == "workstation_value" + + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workstations.GenerateAccessTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
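+            # (The stubbed transcode_result below mirrors the dict shape the
+            # real path_template.transcode is expected to return: "uri",
+            # "method", "query_params", and "body"; "v1/sample_method" is just
+            # a placeholder URI.)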
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workstations.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_access_token(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_access_token_rest_unset_required_fields(): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_access_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("workstation",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_access_token_rest_interceptors(null_interceptor): + transport = transports.WorkstationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkstationsRestInterceptor(), + ) + client = WorkstationsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkstationsRestInterceptor, "post_generate_access_token" + ) as post, mock.patch.object( + transports.WorkstationsRestInterceptor, "pre_generate_access_token" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workstations.GenerateAccessTokenRequest.pb( + workstations.GenerateAccessTokenRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workstations.GenerateAccessTokenResponse.to_json( + workstations.GenerateAccessTokenResponse() + ) + + request = workstations.GenerateAccessTokenRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workstations.GenerateAccessTokenResponse() + + client.generate_access_token( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_access_token_rest_bad_request( + transport: str = "rest", request_type=workstations.GenerateAccessTokenRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "workstation": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_access_token(request) + + +def test_generate_access_token_rest_flattened(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workstations.GenerateAccessTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "workstation": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4/workstations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + workstation="workstation_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workstations.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_access_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{workstation=projects/*/locations/*/workstationClusters/*/workstationConfigs/*/workstations/*}:generateAccessToken" + % client.transport._host, + args[1], + ) + + +def test_generate_access_token_rest_flattened_error(transport: str = "rest"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_access_token( + workstations.GenerateAccessTokenRequest(), + workstation="workstation_value", + ) + + +def test_generate_access_token_rest_error(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkstationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = WorkstationsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.WorkstationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.WorkstationsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsGrpcAsyncIOTransport, + transports.WorkstationsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = WorkstationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.WorkstationsGrpcTransport, + ) + + +def test_workstations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.WorkstationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_workstations_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.workstations_v1beta.services.workstations.transports.WorkstationsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.WorkstationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
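+    # (WorkstationsTransport is the abstract base class; the concrete gRPC and
+    # REST transports are expected to override each of these method stubs.)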
+ methods = ( + "get_workstation_cluster", + "list_workstation_clusters", + "create_workstation_cluster", + "update_workstation_cluster", + "delete_workstation_cluster", + "get_workstation_config", + "list_workstation_configs", + "list_usable_workstation_configs", + "create_workstation_config", + "update_workstation_config", + "delete_workstation_config", + "get_workstation", + "list_workstations", + "list_usable_workstations", + "create_workstation", + "update_workstation", + "delete_workstation", + "start_workstation", + "stop_workstation", + "generate_access_token", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_workstations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.workstations_v1beta.services.workstations.transports.WorkstationsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkstationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_workstations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.workstations_v1beta.services.workstations.transports.WorkstationsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkstationsTransport() + adc.assert_called_once() + + +def test_workstations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + WorkstationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsGrpcAsyncIOTransport, + ], +) +def test_workstations_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
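+    # (Patching google.auth.default keeps the test hermetic: no real ADC lookup
+    # happens, while the assertion below still verifies the scopes and
+    # quota_project_id that the transport forwards to ADC.)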
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkstationsGrpcTransport, + transports.WorkstationsGrpcAsyncIOTransport, + transports.WorkstationsRestTransport, + ], +) +def test_workstations_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkstationsGrpcTransport, grpc_helpers), + (transports.WorkstationsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_workstations_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "workstations.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="workstations.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.WorkstationsGrpcTransport, transports.WorkstationsGrpcAsyncIOTransport], +) +def test_workstations_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
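+    # (client_cert_source_callback returns a (cert bytes, key bytes) pair, and
+    # the transport is expected to pass exactly that pair on to
+    # grpc.ssl_channel_credentials, as asserted below.)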
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_workstations_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.WorkstationsRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_workstations_rest_lro_client():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_workstations_host_no_port(transport_name):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workstations.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "workstations.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://workstations.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_workstations_host_with_port(transport_name):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workstations.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "workstations.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://workstations.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_workstations_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = WorkstationsClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = WorkstationsClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_workstation_cluster._session
+    session2 = client2.transport.get_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.list_workstation_clusters._session
+    session2 = client2.transport.list_workstation_clusters._session
+    assert session1 != session2
+    session1 = client1.transport.create_workstation_cluster._session
+    session2 = client2.transport.create_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.update_workstation_cluster._session
+    session2 = client2.transport.update_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.delete_workstation_cluster._session
+    session2 = client2.transport.delete_workstation_cluster._session
+    assert session1 != session2
+    session1 = client1.transport.get_workstation_config._session
+    session2 = client2.transport.get_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.list_workstation_configs._session
+    session2 = client2.transport.list_workstation_configs._session
+    assert session1 != session2
+    session1 = client1.transport.list_usable_workstation_configs._session
+    session2 = client2.transport.list_usable_workstation_configs._session
+    assert session1 != session2
+    session1 = client1.transport.create_workstation_config._session
+    session2 = client2.transport.create_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.update_workstation_config._session
+    session2 = client2.transport.update_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.delete_workstation_config._session
+    session2 = client2.transport.delete_workstation_config._session
+    assert session1 != session2
+    session1 = client1.transport.get_workstation._session
+    session2 = client2.transport.get_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.list_workstations._session
+    session2 = client2.transport.list_workstations._session
+    assert session1 != session2
+    session1 = client1.transport.list_usable_workstations._session
+    session2 = client2.transport.list_usable_workstations._session
+    assert session1 != session2
+    session1 = client1.transport.create_workstation._session
+    session2 = client2.transport.create_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.update_workstation._session
+    session2 = client2.transport.update_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.delete_workstation._session
+    session2 = client2.transport.delete_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.start_workstation._session
+    session2 = client2.transport.start_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.stop_workstation._session
+    session2 = client2.transport.stop_workstation._session
+    assert session1 != session2
+    session1 = client1.transport.generate_access_token._session
+    session2 = client2.transport.generate_access_token._session
+    assert session1 != session2
+
+
+def test_workstations_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkstationsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_workstations_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkstationsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
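+# (Both deprecated arguments currently trigger a DeprecationWarning, which the
+# tests below assert via pytest.warns.)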
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.WorkstationsGrpcTransport, transports.WorkstationsGrpcAsyncIOTransport],
+)
+def test_workstations_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.WorkstationsGrpcTransport, transports.WorkstationsGrpcAsyncIOTransport],
+)
+def test_workstations_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_workstations_grpc_lro_client():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_workstations_grpc_lro_async_client():
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_workstation_path():
+    project = "squid"
+    location = "clam"
+    workstation_cluster = "whelk"
+    workstation_config = "octopus"
+    workstation = "oyster"
+    expected = "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}/workstations/{workstation}".format(
+        project=project,
+        location=location,
+        workstation_cluster=workstation_cluster,
+        workstation_config=workstation_config,
+        workstation=workstation,
+    )
+    actual = WorkstationsClient.workstation_path(
+        project, location, workstation_cluster, workstation_config, workstation
+    )
+    assert expected == actual
+
+
+def test_parse_workstation_path():
+    expected = {
+        "project": "nudibranch",
+        "location": "cuttlefish",
+        "workstation_cluster": "mussel",
+        "workstation_config": "winkle",
+        "workstation": "nautilus",
+    }
+    path = WorkstationsClient.workstation_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkstationsClient.parse_workstation_path(path)
+    assert expected == actual
+
+
+def test_workstation_cluster_path():
+    project = "scallop"
+    location = "abalone"
+    workstation_cluster = "squid"
+    expected = "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}".format(
+        project=project,
+        location=location,
+        workstation_cluster=workstation_cluster,
+    )
+    actual = WorkstationsClient.workstation_cluster_path(
+        project, location, workstation_cluster
+    )
+    assert expected == actual
+
+
+def test_parse_workstation_cluster_path():
+    expected = {
+        "project": "clam",
+        "location": "whelk",
+        "workstation_cluster": "octopus",
+    }
+    path = WorkstationsClient.workstation_cluster_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkstationsClient.parse_workstation_cluster_path(path)
+    assert expected == actual
+
+
+def test_workstation_config_path():
+    project = "oyster"
+    location = "nudibranch"
+    workstation_cluster = "cuttlefish"
+    workstation_config = "mussel"
+    expected = "projects/{project}/locations/{location}/workstationClusters/{workstation_cluster}/workstationConfigs/{workstation_config}".format(
+        project=project,
+        location=location,
+        workstation_cluster=workstation_cluster,
+        workstation_config=workstation_config,
+    )
+    actual = WorkstationsClient.workstation_config_path(
+        project, location, workstation_cluster, workstation_config
+    )
+    assert expected == actual
+
+
+def test_parse_workstation_config_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+        "workstation_cluster": "scallop",
+        "workstation_config": "abalone",
+    }
+    path = WorkstationsClient.workstation_config_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = WorkstationsClient.parse_workstation_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = WorkstationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = WorkstationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = WorkstationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = WorkstationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = WorkstationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = WorkstationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = WorkstationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = WorkstationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = WorkstationsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = WorkstationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = WorkstationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = WorkstationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.WorkstationsTransport, "_prep_wrapped_messages" + ) as prep: + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.WorkstationsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = WorkstationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/workstationClusters/sample3/workstationConfigs/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
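+    # (Concretely, the routing header should appear in the call metadata as the
+    # tuple ("x-goog-request-params", "name=locations"), as asserted below.)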
+ request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
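+    # The routing header is recorded by the mock as an
+    # ("x-goog-request-params", value) tuple in the metadata keyword argument.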
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
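+    # (A default-constructed proto message is a valid, fully-empty request.)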
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
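+        # FakeUnaryUnaryCall wraps the response in an awaitable, standing in
+        # for the call object a real async gRPC stub would return.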
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = WorkstationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = WorkstationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
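+    # For the IAM methods the routing key is ``resource`` rather than ``name``.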
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
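+    # (and that the designated fields came back through the mock unchanged).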
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
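+        # A default Policy suffices here; the test only asserts that the
+        # stub was invoked.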
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = WorkstationsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
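+        # As in the other async tests, wrap the response so it can be awaited.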
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = WorkstationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (WorkstationsClient, transports.WorkstationsGrpcTransport), + (WorkstationsAsyncClient, transports.WorkstationsGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-workstations/workstations-v1-py.tar.gz b/packages/google-cloud-workstations/workstations-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-geo-type/.coveragerc b/packages/google-geo-type/.coveragerc index f09c89201dd1..e4af1d8d84ca 100644 --- a/packages/google-geo-type/.coveragerc +++ b/packages/google-geo-type/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/geo/type/__init__.py + google/geo/type/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-geo-type/.repo-metadata.json b/packages/google-geo-type/.repo-metadata.json index d545aea5a0c1..5038326bb4b7 100644 --- a/packages/google-geo-type/.repo-metadata.json +++ b/packages/google-geo-type/.repo-metadata.json @@ -3,7 +3,7 @@ "name_pretty": "Geo Type Protos", "api_description": "", "product_documentation": "https://mapsplatform.google.com/maps-products", - "client_documentation": "https://cloud.google.com/python/docs/reference/type/latest", + "client_documentation": "https://googleapis.dev/python/geotype/latest", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", "release_level": "preview", "language": "python", diff --git a/packages/google-geo-type/CHANGELOG.md b/packages/google-geo-type/CHANGELOG.md index 43627bf1b331..e86ceccf44e7 100644 --- a/packages/google-geo-type/CHANGELOG.md +++ 
b/packages/google-geo-type/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-geo-type-v0.3.0...google-geo-type-v0.3.1) (2023-06-03) + + +### Documentation + +* fix broken client library documentation links ([#11192](https://github.com/googleapis/google-cloud-python/issues/11192)) ([5e17f7a](https://github.com/googleapis/google-cloud-python/commit/5e17f7a901bbbae8ff9a44ed62f1abd2386da2c8)) + ## [0.3.0](https://github.com/googleapis/google-cloud-python/compare/google-geo-type-v0.2.1...google-geo-type-v0.3.0) (2023-01-10) diff --git a/packages/google-geo-type/README.rst b/packages/google-geo-type/README.rst index b7f4b69e8953..90174ba55dea 100644 --- a/packages/google-geo-type/README.rst +++ b/packages/google-geo-type/README.rst @@ -15,7 +15,7 @@ Python Client for Geo Type Protos .. |versions| image:: https://img.shields.io/pypi/pyversions/google-geo-type.svg :target: https://pypi.org/project/google-geo-type/ .. _Geo Type Protos: https://mapsplatform.google.com/maps-products -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/type/latest +.. _Client Library Documentation: https://googleapis.dev/python/geotype/latest .. _Product Documentation: https://mapsplatform.google.com/maps-products Quick Start diff --git a/packages/google-geo-type/google/geo/type/gapic_version.py b/packages/google-geo-type/google/geo/type/gapic_version.py index 7260eee71a46..056efd287f1d 100644 --- a/packages/google-geo-type/google/geo/type/gapic_version.py +++ b/packages/google-geo-type/google/geo/type/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.3.0" # {x-release-please-version} +__version__ = "0.3.1" # {x-release-please-version} diff --git a/packages/google-geo-type/google/geo/type/types/viewport.py b/packages/google-geo-type/google/geo/type/types/viewport.py index 77ef331a3aa6..44843f72a184 100644 --- a/packages/google-geo-type/google/geo/type/types/viewport.py +++ b/packages/google-geo-type/google/geo/type/types/viewport.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.type import latlng_pb2 # type: ignore diff --git a/packages/google-geo-type/noxfile.py b/packages/google-geo-type/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-geo-type/noxfile.py +++ b/packages/google-geo-type/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-geo-type/setup.py b/packages/google-geo-type/setup.py index a4cfac967ac9..cf4011df1024 100644 --- a/packages/google-geo-type/setup.py +++ b/packages/google-geo-type/setup.py @@ -55,9 +55,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.geo"] setuptools.setup( name=name, diff --git a/packages/google-maps-addressvalidation/.coveragerc b/packages/google-maps-addressvalidation/.coveragerc index d5ad639946f8..e4692ea29e6d 100644 --- a/packages/google-maps-addressvalidation/.coveragerc +++ b/packages/google-maps-addressvalidation/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/maps/addressvalidation/__init__.py + google/maps/addressvalidation/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-maps-addressvalidation/.repo-metadata.json b/packages/google-maps-addressvalidation/.repo-metadata.json index d6e4a1d0f1a3..cadd406f867f 100644 --- a/packages/google-maps-addressvalidation/.repo-metadata.json +++ b/packages/google-maps-addressvalidation/.repo-metadata.json @@ -3,7 +3,7 @@ "name_pretty": "Address Validation API", "api_description": "Address Validation lets you validate and correct address inputs with Places data powered by Google Maps Platform.", "product_documentation": "https://mapsplatform.google.com/maps-products/address-validation/", - "client_documentation": "https://cloud.google.com/python/docs/reference/addressvalidation/latest", + "client_documentation": "https://googleapis.dev/python/addressvalidation/latest", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", "release_level": "preview", "language": "python", diff --git a/packages/google-maps-addressvalidation/CHANGELOG.md b/packages/google-maps-addressvalidation/CHANGELOG.md index 228978850da5..a3696304319e 100644 --- a/packages/google-maps-addressvalidation/CHANGELOG.md +++ b/packages/google-maps-addressvalidation/CHANGELOG.md @@ -1,5 +1,40 @@ # Changelog +## [0.3.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-addressvalidation-v0.3.3...google-maps-addressvalidation-v0.3.4) (2023-06-03) + + +### Documentation + +* fix broken client library documentation links ([#11192](https://github.com/googleapis/google-cloud-python/issues/11192)) ([5e17f7a](https://github.com/googleapis/google-cloud-python/commit/5e17f7a901bbbae8ff9a44ed62f1abd2386da2c8)) + +## [0.3.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-addressvalidation-v0.3.2...google-maps-addressvalidation-v0.3.3) (2023-04-15) + + +### Documentation + +* Update description of the postal address ([#11066](https://github.com/googleapis/google-cloud-python/issues/11066)) ([a0b1149](https://github.com/googleapis/google-cloud-python/commit/a0b11490ea6488d725a4738fdaa63e98947f8528)) + +## [0.3.2](https://github.com/googleapis/google-cloud-python/compare/google-maps-addressvalidation-v0.3.1...google-maps-addressvalidation-v0.3.2) (2023-04-11) + + +### Documentation + +* Update some Address Validation API proto descriptions to improve clarity ([#11056](https://github.com/googleapis/google-cloud-python/issues/11056)) ([7ca4918](https://github.com/googleapis/google-cloud-python/commit/7ca4918465957e510028d4d2465d8ba0424ad97d)) + +## 
[0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-maps-addressvalidation-v0.3.0...google-maps-addressvalidation-v0.3.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## [0.3.0](https://github.com/googleapis/google-cloud-python/compare/google-maps-addressvalidation-v0.2.1...google-maps-addressvalidation-v0.3.0) (2023-02-09) + + +### Features + +* enable "rest" transport in Python for services supporting numeric enums ([#10839](https://github.com/googleapis/google-cloud-python/issues/10839)) ([ad59d56](https://github.com/googleapis/google-cloud-python/commit/ad59d569bda339ed31500602e2db369afdbfcf0b)) + ## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-maps-addressvalidation-v0.2.0...google-maps-addressvalidation-v0.2.1) (2023-01-20) diff --git a/packages/google-maps-addressvalidation/README.rst b/packages/google-maps-addressvalidation/README.rst index ac223121954e..a30b60168339 100644 --- a/packages/google-maps-addressvalidation/README.rst +++ b/packages/google-maps-addressvalidation/README.rst @@ -15,7 +15,7 @@ Python Client for Address Validation API .. |versions| image:: https://img.shields.io/pypi/pyversions/google-maps-addressvalidation.svg :target: https://pypi.org/project/google-maps-addressvalidation/ .. _Address Validation API: https://mapsplatform.google.com/maps-products/address-validation/ -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/addressvalidation/latest +.. _Client Library Documentation: https://googleapis.dev/python/addressvalidation/latest .. _Product Documentation: https://mapsplatform.google.com/maps-products/address-validation/ Quick Start diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py index 9af1468d4a23..0eb6a18fbd55 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.3.4" # {x-release-please-version} diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/__init__.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/__init__.py index c7fcd93ffe51..b29ddb46181c 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/__init__.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.maps.addressvalidation import gapic_version as package_version +from google.maps.addressvalidation_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_metadata.json b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_metadata.json index ecefddc7312c..c521c066be76 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_metadata.json +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_metadata.json @@ -36,6 +36,21 @@ ] } } + }, + "rest": { + "libraryClient": "AddressValidationClient", + "rpcs": { + "ProvideValidationFeedback": { + "methods": [ + "provide_validation_feedback" + ] + }, + "ValidateAddress": { + "methods": [ + "validate_address" + ] + } + } } } } diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py index aa80f74315ce..0eb6a18fbd55 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.3.4" # {x-release-please-version} diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py index 0e1d9fa717c2..684fa92169ab 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/client.py @@ -51,6 +51,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, AddressValidationTransport from .transports.grpc import AddressValidationGrpcTransport from .transports.grpc_asyncio import AddressValidationGrpcAsyncIOTransport +from .transports.rest import AddressValidationRestTransport class AddressValidationClientMeta(type): @@ -66,6 +67,7 @@ class AddressValidationClientMeta(type): ) # type: Dict[str, Type[AddressValidationTransport]] _transport_registry["grpc"] = AddressValidationGrpcTransport _transport_registry["grpc_asyncio"] = AddressValidationGrpcAsyncIOTransport + _transport_registry["rest"] = AddressValidationRestTransport def get_transport_class( cls, diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/transports/__init__.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/transports/__init__.py index 8a02cdb7f9ff..af8a87aa3a8b 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/transports/__init__.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/transports/__init__.py @@ -19,14 +19,18 @@ from .base import AddressValidationTransport from .grpc import AddressValidationGrpcTransport from .grpc_asyncio import AddressValidationGrpcAsyncIOTransport +from .rest import AddressValidationRestInterceptor, AddressValidationRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[AddressValidationTransport]] _transport_registry["grpc"] = AddressValidationGrpcTransport _transport_registry["grpc_asyncio"] = AddressValidationGrpcAsyncIOTransport +_transport_registry["rest"] = AddressValidationRestTransport __all__ = ( "AddressValidationTransport", "AddressValidationGrpcTransport", "AddressValidationGrpcAsyncIOTransport", + "AddressValidationRestTransport", + "AddressValidationRestInterceptor", ) diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/transports/rest.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/transports/rest.py new file mode 100644 index 000000000000..b0be9027cb53 --- /dev/null +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/services/address_validation/transports/rest.py @@ -0,0 +1,472 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.maps.addressvalidation_v1.types import address_validation_service + +from .base import AddressValidationTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AddressValidationRestInterceptor: + """Interceptor for AddressValidation. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AddressValidationRestTransport. + + .. 
code-block:: python + class MyCustomAddressValidationInterceptor(AddressValidationRestInterceptor): + def pre_provide_validation_feedback(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_provide_validation_feedback(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_validate_address(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_validate_address(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AddressValidationRestTransport(interceptor=MyCustomAddressValidationInterceptor()) + client = AddressValidationClient(transport=transport) + + + """ + + def pre_provide_validation_feedback( + self, + request: address_validation_service.ProvideValidationFeedbackRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + address_validation_service.ProvideValidationFeedbackRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for provide_validation_feedback + + Override in a subclass to manipulate the request or metadata + before they are sent to the AddressValidation server. + """ + return request, metadata + + def post_provide_validation_feedback( + self, response: address_validation_service.ProvideValidationFeedbackResponse + ) -> address_validation_service.ProvideValidationFeedbackResponse: + """Post-rpc interceptor for provide_validation_feedback + + Override in a subclass to manipulate the response + after it is returned by the AddressValidation server but before + it is returned to user code. + """ + return response + + def pre_validate_address( + self, + request: address_validation_service.ValidateAddressRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + address_validation_service.ValidateAddressRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for validate_address + + Override in a subclass to manipulate the request or metadata + before they are sent to the AddressValidation server. + """ + return request, metadata + + def post_validate_address( + self, response: address_validation_service.ValidateAddressResponse + ) -> address_validation_service.ValidateAddressResponse: + """Post-rpc interceptor for validate_address + + Override in a subclass to manipulate the response + after it is returned by the AddressValidation server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AddressValidationRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AddressValidationRestInterceptor + + +class AddressValidationRestTransport(AddressValidationTransport): + """REST backend transport for AddressValidation. + + The service for validating addresses. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "addressvalidation.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[AddressValidationRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
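+        # The host may already carry an explicit scheme; the regex below
+        # separates it from the bare hostname so that ``url_scheme`` is only
+        # prepended when the caller omitted one.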
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AddressValidationRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _ProvideValidationFeedback(AddressValidationRestStub):
+        def __hash__(self):
+            return hash("ProvideValidationFeedback")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: address_validation_service.ProvideValidationFeedbackRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> address_validation_service.ProvideValidationFeedbackResponse:
+            r"""Call the provide validation
+            feedback method over HTTP.
+
+            Args:
+                request (~.address_validation_service.ProvideValidationFeedbackRequest):
+                    The request object. The request for sending validation
+                    feedback.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.address_validation_service.ProvideValidationFeedbackResponse:
+                    The response for validation feedback.
+                    The response is empty if the feedback is
+                    sent successfully.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1:provideValidationFeedback", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_provide_validation_feedback( + request, metadata + ) + pb_request = address_validation_service.ProvideValidationFeedbackRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = address_validation_service.ProvideValidationFeedbackResponse() + pb_resp = address_validation_service.ProvideValidationFeedbackResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_provide_validation_feedback(resp) + return resp + + class _ValidateAddress(AddressValidationRestStub): + def __hash__(self): + return hash("ValidateAddress") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: address_validation_service.ValidateAddressRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> address_validation_service.ValidateAddressResponse: + r"""Call the validate address method over HTTP. + + Args: + request (~.address_validation_service.ValidateAddressRequest): + The request object. The request for validating an + address. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.address_validation_service.ValidateAddressResponse: + The response to an address validation + request. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1:validateAddress", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_validate_address( + request, metadata + ) + pb_request = address_validation_service.ValidateAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = address_validation_service.ValidateAddressResponse() + pb_resp = address_validation_service.ValidateAddressResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate_address(resp) + return resp + + @property + def provide_validation_feedback( + self, + ) -> Callable[ + [address_validation_service.ProvideValidationFeedbackRequest], + address_validation_service.ProvideValidationFeedbackResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ProvideValidationFeedback(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate_address( + self, + ) -> Callable[ + [address_validation_service.ValidateAddressRequest], + address_validation_service.ValidateAddressResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ValidateAddress(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AddressValidationRestTransport",) diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address.py index 4c27cc732931..98a604a3cf8c 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.type import postal_address_pb2 # type: ignore @@ -29,17 +31,19 @@ class Address(proto.Message): - r"""Details of the address parsed from the input. + r"""Details of the post-processed address. Post-processing + includes correcting misspelled parts of the address, replacing + incorrect parts, and inferring missing parts. Attributes: formatted_address (str): - The corrected address, formatted as a + The post-processed address, formatted as a single-line address following the address formatting rules of the region where the address is located. postal_address (google.type.postal_address_pb2.PostalAddress): - The validated address represented as a postal - address. + The post-processed address represented as a + postal address. address_components (MutableSequence[google.maps.addressvalidation_v1.types.AddressComponent]): Unordered list. The individual address components of the formatted and corrected diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address_validation_service.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address_validation_service.py index ea51284bc00e..ea1d751cf0b7 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address_validation_service.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/address_validation_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.type import postal_address_pb2 # type: ignore @@ -208,7 +210,9 @@ class ValidationResult(proto.Message): Information about the location and place that the address geocoded to. metadata (google.maps.addressvalidation_v1.types.AddressMetadata): - Other information relevant to deliverability. + Other information relevant to deliverability. ``metadata`` + is not guaranteed to be fully populated for every address + sent to the Address Validation API. usps_data (google.maps.addressvalidation_v1.types.UspsData): Extra deliverability flags provided by USPS. Only provided in region ``US`` and ``PR``. diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/geocode.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/geocode.py index 828473599212..7ba2894908a7 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/geocode.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/geocode.py @@ -13,9 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence -import google.geo.type.types # type: ignore +from google.geo.type.types import viewport from google.type import latlng_pb2 # type: ignore import proto # type: ignore @@ -77,10 +79,10 @@ class Geocode(proto.Message): number=2, message="PlusCode", ) - bounds: google.geo.type.types.Viewport = proto.Field( + bounds: viewport.Viewport = proto.Field( proto.MESSAGE, number=4, - message=google.geo.type.types.Viewport, + message=viewport.Viewport, ) feature_size_meters: float = proto.Field( proto.FLOAT, diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/metadata_.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/metadata_.py index 33c30b782a50..8dd2acf56672 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/metadata_.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/metadata_.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -26,7 +28,10 @@ class AddressMetadata(proto.Message): - r"""The metadata for the address. + r"""The metadata for the address. ``metadata`` is not guaranteed to be + fully populated for every address sent to the Address Validation + API. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields diff --git a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/usps_data.py b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/usps_data.py index a1ab1ff3db56..bb393d965fa8 100644 --- a/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/usps_data.py +++ b/packages/google-maps-addressvalidation/google/maps/addressvalidation_v1/types/usps_data.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -89,7 +91,11 @@ class UspsAddress(proto.Message): class UspsData(proto.Message): - r"""The USPS data for the address. + r"""The USPS data for the address. ``uspsData`` is not guaranteed to be + fully populated for every US or PR address sent to the Address + Validation API. It's recommended to integrate the backup address + fields in the response if you utilize uspsData as the primary part + of the response. Attributes: standardized_address (google.maps.addressvalidation_v1.types.UspsAddress): diff --git a/packages/google-maps-addressvalidation/noxfile.py b/packages/google-maps-addressvalidation/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-maps-addressvalidation/noxfile.py +++ b/packages/google-maps-addressvalidation/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json b/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json index d579244f041d..17ced2511dcc 100644 --- a/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json +++ b/packages/google-maps-addressvalidation/samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-addressvalidation", - "version": "0.2.1" + "version": "0.3.4" }, "snippets": [ { diff --git a/packages/google-maps-addressvalidation/setup.py b/packages/google-maps-addressvalidation/setup.py index 585209707dca..5ef7016ab5b7 100644 --- a/packages/google-maps-addressvalidation/setup.py +++ b/packages/google-maps-addressvalidation/setup.py @@ -42,7 +42,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-geo-type<1.0.0dev", + "google-geo-type >= 0.1.0, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python" @@ -58,9 +58,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.maps"] setuptools.setup( name=name, diff --git a/packages/google-maps-addressvalidation/testing/constraints-3.10.txt b/packages/google-maps-addressvalidation/testing/constraints-3.10.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-addressvalidation/testing/constraints-3.10.txt +++ b/packages/google-maps-addressvalidation/testing/constraints-3.10.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-addressvalidation/testing/constraints-3.11.txt b/packages/google-maps-addressvalidation/testing/constraints-3.11.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-addressvalidation/testing/constraints-3.11.txt +++ b/packages/google-maps-addressvalidation/testing/constraints-3.11.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-addressvalidation/testing/constraints-3.12.txt b/packages/google-maps-addressvalidation/testing/constraints-3.12.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-addressvalidation/testing/constraints-3.12.txt +++ b/packages/google-maps-addressvalidation/testing/constraints-3.12.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-addressvalidation/testing/constraints-3.8.txt b/packages/google-maps-addressvalidation/testing/constraints-3.8.txt index ed7f9aed2559..2214a366a259 100644 --- 
a/packages/google-maps-addressvalidation/testing/constraints-3.8.txt +++ b/packages/google-maps-addressvalidation/testing/constraints-3.8.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-addressvalidation/testing/constraints-3.9.txt b/packages/google-maps-addressvalidation/testing/constraints-3.9.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-addressvalidation/testing/constraints-3.9.txt +++ b/packages/google-maps-addressvalidation/testing/constraints-3.9.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py b/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py index 52d7a552b876..ee54de2de706 100644 --- a/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py +++ b/packages/google-maps-addressvalidation/tests/unit/gapic/addressvalidation_v1/test_address_validation.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,12 +33,15 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format from google.type import postal_address_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.maps.addressvalidation_v1.services.address_validation import ( AddressValidationAsyncClient, @@ -96,6 +101,7 @@ def test__get_default_mtls_endpoint(): [ (AddressValidationClient, "grpc"), (AddressValidationAsyncClient, "grpc_asyncio"), + (AddressValidationClient, "rest"), ], ) def test_address_validation_client_from_service_account_info( @@ -111,7 +117,11 @@ def test_address_validation_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("addressvalidation.googleapis.com:443") + assert client.transport._host == ( + "addressvalidation.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://addressvalidation.googleapis.com" + ) @pytest.mark.parametrize( @@ -119,6 +129,7 @@ def test_address_validation_client_from_service_account_info( [ (transports.AddressValidationGrpcTransport, "grpc"), (transports.AddressValidationGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AddressValidationRestTransport, "rest"), ], ) def test_address_validation_client_service_account_always_use_jwt( @@ -144,6 +155,7 @@ def test_address_validation_client_service_account_always_use_jwt( [ (AddressValidationClient, "grpc"), (AddressValidationAsyncClient, "grpc_asyncio"), + (AddressValidationClient, "rest"), ], ) def test_address_validation_client_from_service_account_file( @@ -166,13 +178,18 @@ def test_address_validation_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("addressvalidation.googleapis.com:443") + assert client.transport._host == ( + 
"addressvalidation.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://addressvalidation.googleapis.com" + ) def test_address_validation_client_get_transport_class(): transport = AddressValidationClient.get_transport_class() available_transports = [ transports.AddressValidationGrpcTransport, + transports.AddressValidationRestTransport, ] assert transport in available_transports @@ -189,6 +206,7 @@ def test_address_validation_client_get_transport_class(): transports.AddressValidationGrpcAsyncIOTransport, "grpc_asyncio", ), + (AddressValidationClient, transports.AddressValidationRestTransport, "rest"), ], ) @mock.patch.object( @@ -344,6 +362,18 @@ def test_address_validation_client_client_options( "grpc_asyncio", "false", ), + ( + AddressValidationClient, + transports.AddressValidationRestTransport, + "rest", + "true", + ), + ( + AddressValidationClient, + transports.AddressValidationRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -543,6 +573,7 @@ def test_address_validation_client_get_mtls_endpoint_and_cert_source(client_clas transports.AddressValidationGrpcAsyncIOTransport, "grpc_asyncio", ), + (AddressValidationClient, transports.AddressValidationRestTransport, "rest"), ], ) def test_address_validation_client_client_options_scopes( @@ -583,6 +614,12 @@ def test_address_validation_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + AddressValidationClient, + transports.AddressValidationRestTransport, + "rest", + None, + ), ], ) def test_address_validation_client_client_options_credentials_file( @@ -881,6 +918,454 @@ async def test_provide_validation_feedback_async_from_dict(): await test_provide_validation_feedback_async(request_type=dict) +@pytest.mark.parametrize( + "request_type", + [ + address_validation_service.ValidateAddressRequest, + dict, + ], +) +def test_validate_address_rest(request_type): + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = address_validation_service.ValidateAddressResponse( + response_id="response_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = address_validation_service.ValidateAddressResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.validate_address(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, address_validation_service.ValidateAddressResponse) + assert response.response_id == "response_id_value" + + +def test_validate_address_rest_required_fields( + request_type=address_validation_service.ValidateAddressRequest, +): + transport_class = transports.AddressValidationRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_address._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_address._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = address_validation_service.ValidateAddressResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = address_validation_service.ValidateAddressResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.validate_address(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_validate_address_rest_unset_required_fields(): + transport = transports.AddressValidationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.validate_address._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("address",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_address_rest_interceptors(null_interceptor): + transport = transports.AddressValidationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AddressValidationRestInterceptor(), + ) + client = AddressValidationClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AddressValidationRestInterceptor, "post_validate_address" + ) as post, mock.patch.object( + transports.AddressValidationRestInterceptor, "pre_validate_address" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = address_validation_service.ValidateAddressRequest.pb( + address_validation_service.ValidateAddressRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + address_validation_service.ValidateAddressResponse.to_json( + address_validation_service.ValidateAddressResponse() + ) + ) + + request = address_validation_service.ValidateAddressRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = address_validation_service.ValidateAddressResponse() + + client.validate_address( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_validate_address_rest_bad_request( + transport: str = "rest", + request_type=address_validation_service.ValidateAddressRequest, +): + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.validate_address(request) + + +def test_validate_address_rest_error(): + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + address_validation_service.ProvideValidationFeedbackRequest, + dict, + ], +) +def test_provide_validation_feedback_rest(request_type): + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = address_validation_service.ProvideValidationFeedbackResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + address_validation_service.ProvideValidationFeedbackResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.provide_validation_feedback(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, address_validation_service.ProvideValidationFeedbackResponse + ) + + +def test_provide_validation_feedback_rest_required_fields( + request_type=address_validation_service.ProvideValidationFeedbackRequest, +): + transport_class = transports.AddressValidationRestTransport + + request_init = {} + request_init["response_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).provide_validation_feedback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["responseId"] = "response_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).provide_validation_feedback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "responseId" in jsonified_request + assert jsonified_request["responseId"] == "response_id_value" + + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = address_validation_service.ProvideValidationFeedbackResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ( + address_validation_service.ProvideValidationFeedbackResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.provide_validation_feedback(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_provide_validation_feedback_rest_unset_required_fields(): + transport = transports.AddressValidationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.provide_validation_feedback._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "conclusion", + "responseId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_provide_validation_feedback_rest_interceptors(null_interceptor): + transport = transports.AddressValidationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AddressValidationRestInterceptor(), + ) + client = AddressValidationClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AddressValidationRestInterceptor, "post_provide_validation_feedback" + ) as post, mock.patch.object( + transports.AddressValidationRestInterceptor, "pre_provide_validation_feedback" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = address_validation_service.ProvideValidationFeedbackRequest.pb( + address_validation_service.ProvideValidationFeedbackRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + address_validation_service.ProvideValidationFeedbackResponse.to_json( + address_validation_service.ProvideValidationFeedbackResponse() + ) + ) + + request = address_validation_service.ProvideValidationFeedbackRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + address_validation_service.ProvideValidationFeedbackResponse() + ) + + client.provide_validation_feedback( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_provide_validation_feedback_rest_bad_request( + transport: str = "rest", + 
request_type=address_validation_service.ProvideValidationFeedbackRequest, +): + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.provide_validation_feedback(request) + + +def test_provide_validation_feedback_rest_error(): + client = AddressValidationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AddressValidationGrpcTransport( @@ -962,6 +1447,7 @@ def test_transport_get_channel(): [ transports.AddressValidationGrpcTransport, transports.AddressValidationGrpcAsyncIOTransport, + transports.AddressValidationRestTransport, ], ) def test_transport_adc(transport_class): @@ -976,6 +1462,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1106,6 +1593,7 @@ def test_address_validation_transport_auth_adc(transport_class): [ transports.AddressValidationGrpcTransport, transports.AddressValidationGrpcAsyncIOTransport, + transports.AddressValidationRestTransport, ], ) def test_address_validation_transport_auth_gdch_credentials(transport_class): @@ -1203,11 +1691,23 @@ def test_address_validation_grpc_transport_client_cert_source_for_mtls(transport ) +def test_address_validation_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AddressValidationRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_address_validation_host_no_port(transport_name): @@ -1218,7 +1718,11 @@ def test_address_validation_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("addressvalidation.googleapis.com:443") + assert client.transport._host == ( + "addressvalidation.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://addressvalidation.googleapis.com" + ) @pytest.mark.parametrize( @@ -1226,6 +1730,7 @@ def test_address_validation_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_address_validation_host_with_port(transport_name): @@ -1236,7 +1741,36 @@ def test_address_validation_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("addressvalidation.googleapis.com:8000") + assert client.transport._host == ( + "addressvalidation.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://addressvalidation.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_address_validation_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AddressValidationClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AddressValidationClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.validate_address._session + session2 = client2.transport.validate_address._session + assert session1 != session2 + session1 = client1.transport.provide_validation_feedback._session + session2 = client2.transport.provide_validation_feedback._session + assert session1 != session2 def test_address_validation_grpc_transport_channel(): @@ -1507,6 +2041,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1524,6 +2059,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-maps-mapsplatformdatasets/.OwlBot.yaml b/packages/google-maps-mapsplatformdatasets/.OwlBot.yaml new file mode 100644 index 000000000000..637bd313fca8 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/maps/mapsplatformdatasets/(.*)/.*-py + dest: /owl-bot-staging/google-maps-mapsplatformdatasets/$1 diff --git a/packages/google-maps-mapsplatformdatasets/.coveragerc b/packages/google-maps-mapsplatformdatasets/.coveragerc new file mode 100644 index 000000000000..9f3dc18dceda --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/maps/mapsplatformdatasets/__init__.py + google/maps/mapsplatformdatasets/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-maps-mapsplatformdatasets/.flake8 b/packages/google-maps-mapsplatformdatasets/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. 
+ **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-maps-mapsplatformdatasets/.gitignore b/packages/google-maps-mapsplatformdatasets/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-maps-mapsplatformdatasets/.repo-metadata.json b/packages/google-maps-mapsplatformdatasets/.repo-metadata.json new file mode 100644 index 000000000000..70cf6a75357b --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "mapsplatformdatasets", + "name_pretty": "Maps Platform Datasets API", + "api_description": "Maps Platform Datasets API", + "product_documentation": "https://developers.google.com/maps", + "client_documentation": "https://googleapis.dev/python/mapsplatformdatasets/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-maps-mapsplatformdatasets", + "api_id": "mapsplatformdatasets.googleapis.com", + "default_version": "v1alpha", + "codeowner_team": "", + "api_shortname": "mapsplatformdatasets" +} diff --git a/packages/google-maps-mapsplatformdatasets/CHANGELOG.md b/packages/google-maps-mapsplatformdatasets/CHANGELOG.md new file mode 100644 index 000000000000..80d7feaf9980 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/CHANGELOG.md @@ -0,0 +1,36 @@ +# Changelog + +## [0.3.0](https://github.com/googleapis/google-cloud-python/compare/google-maps-mapsplatformdatasets-v0.2.1...google-maps-mapsplatformdatasets-v0.3.0) (2023-06-03) + + +### Features + +* Add client libraries for v1 ([#11226](https://github.com/googleapis/google-cloud-python/issues/11226)) ([08b0fe0](https://github.com/googleapis/google-cloud-python/commit/08b0fe07a7841095669eb498af17d656e10b38ea)) + + +### Documentation + +* fix broken client library documentation links ([#11192](https://github.com/googleapis/google-cloud-python/issues/11192)) ([5e17f7a](https://github.com/googleapis/google-cloud-python/commit/5e17f7a901bbbae8ff9a44ed62f1abd2386da2c8)) + +## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-maps-mapsplatformdatasets-v0.2.0...google-maps-mapsplatformdatasets-v0.2.1) (2023-03-25) + + +### Documentation + +* Fix formatting of request arg in docstring ([#10867](https://github.com/googleapis/google-cloud-python/issues/10867)) ([d34a425](https://github.com/googleapis/google-cloud-python/commit/d34a425f7d0f02bebaf20d24b725b8c25c699697)) + +## 
[0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-maps-mapsplatformdatasets-v0.1.0...google-maps-mapsplatformdatasets-v0.2.0) (2023-02-09) + + +### Features + +* enable "rest" transport in Python for services supporting numeric enums ([#10839](https://github.com/googleapis/google-cloud-python/issues/10839)) ([ad59d56](https://github.com/googleapis/google-cloud-python/commit/ad59d569bda339ed31500602e2db369afdbfcf0b)) + +## 0.1.0 (2023-01-31) + + +### Features + +* add initial files for google.maps.mapsplatformdatasets.v1alpha ([#10831](https://github.com/googleapis/google-cloud-python/issues/10831)) ([339f07b](https://github.com/googleapis/google-cloud-python/commit/339f07bca21ed0955f0e04c71067ec96253faf02)) + +## Changelog diff --git a/packages/google-maps-mapsplatformdatasets/CODE_OF_CONDUCT.md b/packages/google-maps-mapsplatformdatasets/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-maps-mapsplatformdatasets/CONTRIBUTING.rst b/packages/google-maps-mapsplatformdatasets/CONTRIBUTING.rst new file mode 100644 index 000000000000..c70445d7b00c --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. 
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-<python version> -- -k <name of test>
+
+
+  .. note::
+
+    System tests are only configured to run under Python.
+    For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via:
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-maps-mapsplatformdatasets
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-maps-mapsplatformdatasets/LICENSE b/packages/google-maps-mapsplatformdatasets/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-maps-mapsplatformdatasets/MANIFEST.in b/packages/google-maps-mapsplatformdatasets/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-maps-mapsplatformdatasets/README.rst b/packages/google-maps-mapsplatformdatasets/README.rst
new file mode 100644
index 000000000000..1567e99dcff3
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/README.rst
@@ -0,0 +1,107 @@
+Python Client for Maps Platform Datasets API
+============================================
+
+|preview| |pypi| |versions|
+
+`Maps Platform Datasets API`_: Maps Platform Datasets API
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-maps-mapsplatformdatasets.svg
+   :target: https://pypi.org/project/google-maps-mapsplatformdatasets/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-maps-mapsplatformdatasets.svg
+   :target: https://pypi.org/project/google-maps-mapsplatformdatasets/
+.. _Maps Platform Datasets API: https://developers.google.com/maps
+.. _Client Library Documentation: https://googleapis.dev/python/mapsplatformdatasets/latest
+.. _Product Documentation: https://developers.google.com/maps
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Maps Platform Datasets API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Maps Platform Datasets API.: https://developers.google.com/maps
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-maps-mapsplatformdatasets
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-maps-mapsplatformdatasets
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Maps Platform Datasets API
+  to see other available methods on the client.
+- Read the `Maps Platform Datasets API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Maps Platform Datasets API Product documentation: https://developers.google.com/maps
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-maps-mapsplatformdatasets/SECURITY.md b/packages/google-maps-mapsplatformdatasets/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and coordinate disclosure here on GitHub, using a GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/packages/google-maps-mapsplatformdatasets/docs/CHANGELOG.md b/packages/google-maps-mapsplatformdatasets/docs/CHANGELOG.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/docs/CHANGELOG.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-maps-mapsplatformdatasets/docs/README.rst b/packages/google-maps-mapsplatformdatasets/docs/README.rst
new file mode 120000
index 000000000000..89a0106941ff
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/docs/README.rst
@@ -0,0 +1 @@
+../README.rst
\ No newline at end of file
diff --git a/packages/google-maps-mapsplatformdatasets/docs/_static/custom.css b/packages/google-maps-mapsplatformdatasets/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+  border-color: red;
+  border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/packages/google-maps-mapsplatformdatasets/docs/_templates/layout.html b/packages/google-maps-mapsplatformdatasets/docs/_templates/layout.html
new file mode 100644
index 000000000000..6316a537f72b
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-maps-mapsplatformdatasets/docs/conf.py b/packages/google-maps-mapsplatformdatasets/docs/conf.py new file mode 100644 index 000000000000..6d1a208ce023 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-maps-mapsplatformdatasets documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-maps-mapsplatformdatasets" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-maps-mapsplatformdatasets", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-maps-mapsplatformdatasets-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-maps-mapsplatformdatasets.tex", + "google-maps-mapsplatformdatasets Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-maps-mapsplatformdatasets", + "google-maps-mapsplatformdatasets Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-maps-mapsplatformdatasets", + "google-maps-mapsplatformdatasets Documentation", + author, + "google-maps-mapsplatformdatasets", + "google-maps-mapsplatformdatasets Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-maps-mapsplatformdatasets/docs/index.rst b/packages/google-maps-mapsplatformdatasets/docs/index.rst new file mode 100644 index 000000000000..0578e94816f3 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Maps Platform Datasets API. +By default, you will get version ``mapsplatformdatasets_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + mapsplatformdatasets_v1/services + mapsplatformdatasets_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + mapsplatformdatasets_v1alpha/services + mapsplatformdatasets_v1alpha/types + + +Changelog +--------- + +For a list of all ``google-maps-mapsplatformdatasets`` releases: + +.. 
toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/maps_platform_datasets.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/maps_platform_datasets.rst new file mode 100644 index 000000000000..d9eff02a3f2b --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/maps_platform_datasets.rst @@ -0,0 +1,10 @@ +MapsPlatformDatasets +-------------------------------------- + +.. automodule:: google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets + :members: + :inherited-members: + +.. automodule:: google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.pagers + :members: + :inherited-members: diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/services.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/services.rst new file mode 100644 index 000000000000..f2d8920fc6af --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Maps Mapsplatformdatasets v1 API +==================================================== +.. toctree:: + :maxdepth: 2 + + maps_platform_datasets diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/types.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/types.rst new file mode 100644 index 000000000000..f4d2f2d7e793 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Maps Mapsplatformdatasets v1 API +================================================= + +.. automodule:: google.maps.mapsplatformdatasets_v1.types + :members: + :show-inheritance: diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/maps_platform_datasets_v1_alpha.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/maps_platform_datasets_v1_alpha.rst new file mode 100644 index 000000000000..c47c959a89c9 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/maps_platform_datasets_v1_alpha.rst @@ -0,0 +1,10 @@ +MapsPlatformDatasetsV1Alpha +--------------------------------------------- + +.. automodule:: google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha + :members: + :inherited-members: + +.. automodule:: google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers + :members: + :inherited-members: diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/services.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/services.rst new file mode 100644 index 000000000000..ad5d13db62d9 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/services.rst @@ -0,0 +1,6 @@ +Services for Google Maps Mapsplatformdatasets v1alpha API +========================================================= +.. 
toctree::
+    :maxdepth: 2
+
+    maps_platform_datasets_v1_alpha
diff --git a/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/types.rst b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/types.rst
new file mode 100644
index 000000000000..d938f9f9a8a6
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/docs/mapsplatformdatasets_v1alpha/types.rst
@@ -0,0 +1,6 @@
+Types for Google Maps Mapsplatformdatasets v1alpha API
+======================================================
+
+.. automodule:: google.maps.mapsplatformdatasets_v1alpha.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-maps-mapsplatformdatasets/docs/multiprocessing.rst b/packages/google-maps-mapsplatformdatasets/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/__init__.py
new file mode 100644
index 000000000000..303467f05536
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/__init__.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
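The ``multiprocessing`` note above is worth making concrete: gRPC channels do not survive ``os.fork()``, so each worker process should construct its own client after the fork. A minimal sketch, assuming local application-default credentials are configured; the project IDs are placeholders:

.. code-block:: python

    import multiprocessing

    from google.maps import mapsplatformdatasets_v1


    def count_datasets(project: str) -> int:
        # Build the client here, *inside* the worker (post-fork). A client
        # created in the parent process must not be reused after forking.
        client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient()
        request = mapsplatformdatasets_v1.ListDatasetsRequest(
            parent=f"projects/{project}"
        )
        return sum(1 for _ in client.list_datasets(request=request))


    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(count_datasets, ["my-project-a", "my-project-b"]))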
+# +from google.maps.mapsplatformdatasets import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.async_client import ( + MapsPlatformDatasetsV1AlphaAsyncClient, +) +from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.client import ( + MapsPlatformDatasetsV1AlphaClient, +) +from google.maps.mapsplatformdatasets_v1alpha.types.data_source import ( + FileFormat, + GcsSource, + LocalFileSource, +) +from google.maps.mapsplatformdatasets_v1alpha.types.dataset import Dataset, State, Usage +from google.maps.mapsplatformdatasets_v1alpha.types.maps_platform_datasets import ( + CreateDatasetRequest, + DeleteDatasetRequest, + DeleteDatasetVersionRequest, + GetDatasetRequest, + ListDatasetsRequest, + ListDatasetsResponse, + ListDatasetVersionsRequest, + ListDatasetVersionsResponse, + UpdateDatasetMetadataRequest, +) + +__all__ = ( + "MapsPlatformDatasetsV1AlphaClient", + "MapsPlatformDatasetsV1AlphaAsyncClient", + "GcsSource", + "LocalFileSource", + "FileFormat", + "Dataset", + "State", + "Usage", + "CreateDatasetRequest", + "DeleteDatasetRequest", + "DeleteDatasetVersionRequest", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListDatasetVersionsRequest", + "ListDatasetVersionsResponse", + "UpdateDatasetMetadataRequest", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/gapic_version.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/gapic_version.py new file mode 100644 index 000000000000..a6ccd663d16f --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.3.0" # {x-release-please-version} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/py.typed b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/py.typed new file mode 100644 index 000000000000..b186faa8f14d --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-mapsplatformdatasets package uses inline types. diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/__init__.py new file mode 100644 index 000000000000..c2e2a532ce97 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.maps.mapsplatformdatasets_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.maps_platform_datasets import ( + MapsPlatformDatasetsAsyncClient, + MapsPlatformDatasetsClient, +) +from .types.data_source import FileFormat, GcsSource, LocalFileSource +from .types.dataset import Dataset, Status, Usage +from .types.maps_platform_datasets import ( + CreateDatasetRequest, + DeleteDatasetRequest, + GetDatasetRequest, + ListDatasetsRequest, + ListDatasetsResponse, + UpdateDatasetMetadataRequest, +) + +__all__ = ( + "MapsPlatformDatasetsAsyncClient", + "CreateDatasetRequest", + "Dataset", + "DeleteDatasetRequest", + "FileFormat", + "GcsSource", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "LocalFileSource", + "MapsPlatformDatasetsClient", + "Status", + "UpdateDatasetMetadataRequest", + "Usage", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_metadata.json b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_metadata.json new file mode 100644 index 000000000000..5c6635d4e431 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.maps.mapsplatformdatasets_v1", + "protoPackage": "google.maps.mapsplatformdatasets.v1", + "schema": "1.0", + "services": { + "MapsPlatformDatasets": { + "clients": { + "grpc": { + "libraryClient": "MapsPlatformDatasetsClient", + "rpcs": { + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "UpdateDatasetMetadata": { + "methods": [ + "update_dataset_metadata" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MapsPlatformDatasetsAsyncClient", + "rpcs": { + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "UpdateDatasetMetadata": { + "methods": [ + "update_dataset_metadata" + ] + } + } + }, + "rest": { + "libraryClient": "MapsPlatformDatasetsClient", + "rpcs": { + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "UpdateDatasetMetadata": { + "methods": [ + "update_dataset_metadata" + ] + } + } + } + } + } + } +} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_version.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 
--- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/py.typed b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/py.typed new file mode 100644 index 000000000000..b186faa8f14d --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-mapsplatformdatasets package uses inline types. diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/__init__.py new file mode 100644 index 000000000000..951f1be8e632 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
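The ``maps_platform_datasets`` service package added below re-exports both the synchronous ``MapsPlatformDatasetsClient`` and the asyncio-based ``MapsPlatformDatasetsAsyncClient``, which mirror each other method-for-method. A minimal sketch of the async variant, assuming default credentials; the resource name is a placeholder:

.. code-block:: python

    import asyncio

    from google.maps import mapsplatformdatasets_v1


    async def main() -> None:
        # Same request/response types as the sync client; only the
        # transport (and the await) differ.
        client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient()
        request = mapsplatformdatasets_v1.GetDatasetRequest(
            name="projects/my-project/datasets/my-dataset-id"
        )
        dataset = await client.get_dataset(request=request)
        print(dataset.name)


    asyncio.run(main())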
+# +from .async_client import MapsPlatformDatasetsAsyncClient +from .client import MapsPlatformDatasetsClient + +__all__ = ( + "MapsPlatformDatasetsClient", + "MapsPlatformDatasetsAsyncClient", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py new file mode 100644 index 000000000000..8ed51a2a5013 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/async_client.py @@ -0,0 +1,794 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.mapsplatformdatasets_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets import pagers +from google.maps.mapsplatformdatasets_v1.types import data_source +from google.maps.mapsplatformdatasets_v1.types import dataset +from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1.types import maps_platform_datasets + +from .client import MapsPlatformDatasetsClient +from .transports.base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsTransport +from .transports.grpc_asyncio import MapsPlatformDatasetsGrpcAsyncIOTransport + + +class MapsPlatformDatasetsAsyncClient: + """Service definition for the Maps Platform Datasets API.""" + + _client: MapsPlatformDatasetsClient + + DEFAULT_ENDPOINT = MapsPlatformDatasetsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MapsPlatformDatasetsClient.DEFAULT_MTLS_ENDPOINT + + dataset_path = staticmethod(MapsPlatformDatasetsClient.dataset_path) + parse_dataset_path = staticmethod(MapsPlatformDatasetsClient.parse_dataset_path) + common_billing_account_path = staticmethod( + MapsPlatformDatasetsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MapsPlatformDatasetsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MapsPlatformDatasetsClient.common_folder_path) + 
parse_common_folder_path = staticmethod(
+        MapsPlatformDatasetsClient.parse_common_folder_path
+    )
+    common_organization_path = staticmethod(
+        MapsPlatformDatasetsClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        MapsPlatformDatasetsClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(MapsPlatformDatasetsClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        MapsPlatformDatasetsClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(MapsPlatformDatasetsClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        MapsPlatformDatasetsClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MapsPlatformDatasetsAsyncClient: The constructed client.
+        """
+        return MapsPlatformDatasetsClient.from_service_account_info.__func__(MapsPlatformDatasetsAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MapsPlatformDatasetsAsyncClient: The constructed client.
+        """
+        return MapsPlatformDatasetsClient.from_service_account_file.__func__(MapsPlatformDatasetsAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + return MapsPlatformDatasetsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MapsPlatformDatasetsTransport: + """Returns the transport used by the client instance. + + Returns: + MapsPlatformDatasetsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(MapsPlatformDatasetsClient).get_transport_class, + type(MapsPlatformDatasetsClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MapsPlatformDatasetsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the maps platform datasets client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MapsPlatformDatasetsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MapsPlatformDatasetsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_dataset( + self, + request: Optional[ + Union[maps_platform_datasets.CreateDatasetRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dataset: Optional[gmm_dataset.Dataset] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Create a new dataset for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1 + + async def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1.types.CreateDatasetRequest, dict]]): + The request object. Request to create a maps dataset. + parent (:class:`str`): + Required. Parent project that will + own the dataset. Format: + projects/{$project} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (:class:`google.maps.mapsplatformdatasets_v1.types.Dataset`): + Required. The dataset version to + create. + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1.types.Dataset: + A representation of a Maps Dataset + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.CreateDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dataset, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_dataset_metadata( + self, + request: Optional[ + Union[maps_platform_datasets.UpdateDatasetMetadataRequest, dict] + ] = None, + *, + dataset: Optional[gmm_dataset.Dataset] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Update the metadata for the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.maps import mapsplatformdatasets_v1
+
+            async def sample_update_dataset_metadata():
+                # Create a client
+                client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient()
+
+                # Initialize request argument(s)
+                request = mapsplatformdatasets_v1.UpdateDatasetMetadataRequest(
+                )
+
+                # Make the request
+                response = await client.update_dataset_metadata(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.maps.mapsplatformdatasets_v1.types.UpdateDatasetMetadataRequest, dict]]):
+                The request object. Request to update the metadata fields
+                of the dataset.
+            dataset (:class:`google.maps.mapsplatformdatasets_v1.types.Dataset`):
+                Required. The dataset to update. The dataset's name is
+                used to identify the dataset to be updated. The name has
+                the format: projects/{project}/datasets/{dataset_id}
+
+                This corresponds to the ``dataset`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                The list of fields to be updated. Supports the value "*"
+                for full replacement.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.maps.mapsplatformdatasets_v1.types.Dataset:
+                A representation of a Maps Dataset
+                resource.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([dataset, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = maps_platform_datasets.UpdateDatasetMetadataRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if dataset is not None:
+            request.dataset = dataset
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_dataset_metadata,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("dataset.name", request.dataset.name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + + async def get_dataset( + self, + request: Optional[Union[maps_platform_datasets.GetDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Get the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1 + + async def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1.types.GetDatasetRequest, dict]]): + The request object. Request to get the specified dataset. + name (:class:`str`): + Required. Resource name. + projects/{project}/datasets/{dataset_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1.types.Dataset: + A representation of a Maps Dataset + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.GetDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dataset, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_datasets( + self, + request: Optional[ + Union[maps_platform_datasets.ListDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetsAsyncPager: + r"""List all the datasets for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1 + + async def sample_list_datasets(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_datasets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1.types.ListDatasetsRequest, dict]]): + The request object. Request to list datasets for the + project. + parent (:class:`str`): + Required. The name of the project to + list all the datasets for. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.pagers.ListDatasetsAsyncPager: + Response to list datasets for the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.ListDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_datasets, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
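+        # When `retry` and `timeout` are left as gapic_v1.method.DEFAULT, the
+        # defaults configured above apply (retry on ServiceUnavailable, with a
+        # 60-second timeout).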
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListDatasetsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_dataset(
+        self,
+        request: Optional[
+            Union[maps_platform_datasets.DeleteDatasetRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Delete the specified dataset.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.maps import mapsplatformdatasets_v1
+
+            async def sample_delete_dataset():
+                # Create a client
+                client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient()
+
+                # Initialize request argument(s)
+                request = mapsplatformdatasets_v1.DeleteDatasetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_dataset(request=request)
+
+        Args:
+            request (Optional[Union[google.maps.mapsplatformdatasets_v1.types.DeleteDatasetRequest, dict]]):
+                The request object. Request to delete a dataset.
+                The dataset to be deleted.
+            name (:class:`str`):
+                Required. Format:
+                projects/${project}/datasets/{dataset_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = maps_platform_datasets.DeleteDatasetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_dataset,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
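+        # The service replies with google.protobuf.Empty, so the result is
+        # deliberately discarded and this method returns None.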
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MapsPlatformDatasetsAsyncClient",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py new file mode 100644 index 000000000000..c9a3a979a192 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/client.py @@ -0,0 +1,1006 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.mapsplatformdatasets_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets import pagers +from google.maps.mapsplatformdatasets_v1.types import data_source +from google.maps.mapsplatformdatasets_v1.types import dataset +from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1.types import maps_platform_datasets + +from .transports.base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsTransport +from .transports.grpc import MapsPlatformDatasetsGrpcTransport +from .transports.grpc_asyncio import MapsPlatformDatasetsGrpcAsyncIOTransport +from .transports.rest import MapsPlatformDatasetsRestTransport + + +class MapsPlatformDatasetsClientMeta(type): + """Metaclass for the MapsPlatformDatasets client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
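+
+    For example, ``MapsPlatformDatasetsClient.get_transport_class("rest")``
+    resolves to the REST transport class, while calling it with no argument
+    returns the first transport registered below (gRPC).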
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[MapsPlatformDatasetsTransport]]
+    _transport_registry["grpc"] = MapsPlatformDatasetsGrpcTransport
+    _transport_registry["grpc_asyncio"] = MapsPlatformDatasetsGrpcAsyncIOTransport
+    _transport_registry["rest"] = MapsPlatformDatasetsRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[MapsPlatformDatasetsTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class MapsPlatformDatasetsClient(metaclass=MapsPlatformDatasetsClientMeta):
+    """Service definition for the Maps Platform Datasets API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "mapsplatformdatasets.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MapsPlatformDatasetsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MapsPlatformDatasetsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MapsPlatformDatasetsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MapsPlatformDatasetsTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def dataset_path(
+        project: str,
+        dataset: str,
+    ) -> str:
+        """Returns a fully-qualified dataset string."""
+        return "projects/{project}/datasets/{dataset}".format(
+            project=project,
+            dataset=dataset,
+        )
+
+    @staticmethod
+    def parse_dataset_path(path: str) -> Dict[str, str]:
+        """Parses a dataset path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, MapsPlatformDatasetsTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the maps platform datasets client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, MapsPlatformDatasetsTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MapsPlatformDatasetsTransport): + # transport is a MapsPlatformDatasetsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_dataset( + self, + request: Optional[ + Union[maps_platform_datasets.CreateDatasetRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dataset: Optional[gmm_dataset.Dataset] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Create a new dataset for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1 + + def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1.types.CreateDatasetRequest, dict]): + The request object. Request to create a maps dataset. + parent (str): + Required. Parent project that will + own the dataset. Format: + projects/{$project} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (google.maps.mapsplatformdatasets_v1.types.Dataset): + Required. The dataset version to + create. + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1.types.Dataset: + A representation of a Maps Dataset + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.CreateDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.CreateDatasetRequest): + request = maps_platform_datasets.CreateDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def update_dataset_metadata(
+        self,
+        request: Optional[
+            Union[maps_platform_datasets.UpdateDatasetMetadataRequest, dict]
+        ] = None,
+        *,
+        dataset: Optional[gmm_dataset.Dataset] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> gmm_dataset.Dataset:
+        r"""Update the metadata for the dataset.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.maps import mapsplatformdatasets_v1
+
+            def sample_update_dataset_metadata():
+                # Create a client
+                client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient()
+
+                # Initialize request argument(s)
+                request = mapsplatformdatasets_v1.UpdateDatasetMetadataRequest(
+                )
+
+                # Make the request
+                response = client.update_dataset_metadata(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.maps.mapsplatformdatasets_v1.types.UpdateDatasetMetadataRequest, dict]):
+                The request object. Request to update the metadata fields
+                of the dataset.
+            dataset (google.maps.mapsplatformdatasets_v1.types.Dataset):
+                Required. The dataset to update. The dataset's name is
+                used to identify the dataset to be updated. The name has
+                the format: projects/{project}/datasets/{dataset_id}
+
+                This corresponds to the ``dataset`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                The list of fields to be updated. Supports the value "*"
+                for full replacement.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.maps.mapsplatformdatasets_v1.types.Dataset:
+                A representation of a Maps Dataset
+                resource.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([dataset, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a maps_platform_datasets.UpdateDatasetMetadataRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, maps_platform_datasets.UpdateDatasetMetadataRequest):
+            request = maps_platform_datasets.UpdateDatasetMetadataRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
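+        # Note that assigning a message to a request field stores a copy, so
+        # the caller's objects are not mutated by later changes to the request.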
+ if dataset is not None: + request.dataset = dataset + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_dataset_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dataset.name", request.dataset.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dataset( + self, + request: Optional[Union[maps_platform_datasets.GetDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Get the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1 + + def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1.types.GetDatasetRequest, dict]): + The request object. Request to get the specified dataset. + name (str): + Required. Resource name. + projects/{project}/datasets/{dataset_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1.types.Dataset: + A representation of a Maps Dataset + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.GetDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.GetDatasetRequest): + request = maps_platform_datasets.GetDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_datasets( + self, + request: Optional[ + Union[maps_platform_datasets.ListDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetsPager: + r"""List all the datasets for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1 + + def sample_list_datasets(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1.types.ListDatasetsRequest, dict]): + The request object. Request to list datasets for the + project. + parent (str): + Required. The name of the project to + list all the datasets for. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.pagers.ListDatasetsPager: + Response to list datasets for the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.ListDatasetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+        if not isinstance(request, maps_platform_datasets.ListDatasetsRequest):
+            request = maps_platform_datasets.ListDatasetsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_datasets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDatasetsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_dataset(
+        self,
+        request: Optional[
+            Union[maps_platform_datasets.DeleteDatasetRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Delete the specified dataset.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.maps import mapsplatformdatasets_v1
+
+            def sample_delete_dataset():
+                # Create a client
+                client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient()
+
+                # Initialize request argument(s)
+                request = mapsplatformdatasets_v1.DeleteDatasetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_dataset(request=request)
+
+        Args:
+            request (Union[google.maps.mapsplatformdatasets_v1.types.DeleteDatasetRequest, dict]):
+                The request object. Request to delete a dataset.
+                The dataset to be deleted.
+            name (str):
+                Required. Format:
+                projects/${project}/datasets/{dataset_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a maps_platform_datasets.DeleteDatasetRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance(request, maps_platform_datasets.DeleteDatasetRequest): + request = maps_platform_datasets.DeleteDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "MapsPlatformDatasetsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MapsPlatformDatasetsClient",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/pagers.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/pagers.py new file mode 100644 index 000000000000..40710eaa5aaa --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.maps.mapsplatformdatasets_v1.types import dataset, maps_platform_datasets + + +class ListDatasetsPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`google.maps.mapsplatformdatasets_v1.types.ListDatasetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`google.maps.mapsplatformdatasets_v1.types.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
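+
+    A minimal usage sketch (``client`` and the parent project below are
+    illustrative assumptions):
+
+    .. code-block:: python
+
+        pager = client.list_datasets(parent="projects/my-project")
+        for ds in pager:  # further pages are fetched transparently
+            print(ds.name)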
+ """ + + def __init__( + self, + method: Callable[..., maps_platform_datasets.ListDatasetsResponse], + request: maps_platform_datasets.ListDatasetsRequest, + response: maps_platform_datasets.ListDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.maps.mapsplatformdatasets_v1.types.ListDatasetsRequest): + The initial request object. + response (google.maps.mapsplatformdatasets_v1.types.ListDatasetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = maps_platform_datasets.ListDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[maps_platform_datasets.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dataset.Dataset]: + for page in self.pages: + yield from page.datasets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatasetsAsyncPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`google.maps.mapsplatformdatasets_v1.types.ListDatasetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`google.maps.mapsplatformdatasets_v1.types.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[maps_platform_datasets.ListDatasetsResponse]], + request: maps_platform_datasets.ListDatasetsRequest, + response: maps_platform_datasets.ListDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.maps.mapsplatformdatasets_v1.types.ListDatasetsRequest): + The initial request object. + response (google.maps.mapsplatformdatasets_v1.types.ListDatasetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = maps_platform_datasets.ListDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[maps_platform_datasets.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dataset.Dataset]: + async def async_generator(): + async for page in self.pages: + for response in page.datasets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/__init__.py new file mode 100644 index 000000000000..314440be6aef --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MapsPlatformDatasetsTransport +from .grpc import MapsPlatformDatasetsGrpcTransport +from .grpc_asyncio import MapsPlatformDatasetsGrpcAsyncIOTransport +from .rest import MapsPlatformDatasetsRestInterceptor, MapsPlatformDatasetsRestTransport + +# Compile a registry of transports. 
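+# The keys below are the strings accepted by the client's ``transport``
+# argument; "grpc" is registered first and therefore serves as the default
+# returned by ``get_transport_class()`` when no transport name is supplied.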
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[MapsPlatformDatasetsTransport]] +_transport_registry["grpc"] = MapsPlatformDatasetsGrpcTransport +_transport_registry["grpc_asyncio"] = MapsPlatformDatasetsGrpcAsyncIOTransport +_transport_registry["rest"] = MapsPlatformDatasetsRestTransport + +__all__ = ( + "MapsPlatformDatasetsTransport", + "MapsPlatformDatasetsGrpcTransport", + "MapsPlatformDatasetsGrpcAsyncIOTransport", + "MapsPlatformDatasetsRestTransport", + "MapsPlatformDatasetsRestInterceptor", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/base.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/base.py new file mode 100644 index 000000000000..a0e3441fe877 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/base.py @@ -0,0 +1,235 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.maps.mapsplatformdatasets_v1 import gapic_version as package_version +from google.maps.mapsplatformdatasets_v1.types import dataset +from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1.types import maps_platform_datasets + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MapsPlatformDatasetsTransport(abc.ABC): + """Abstract transport class for MapsPlatformDatasets.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "mapsplatformdatasets.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.create_dataset: gapic_v1.method.wrap_method(
+                self.create_dataset,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_dataset_metadata: gapic_v1.method.wrap_method(
+                self.update_dataset_metadata,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_dataset: gapic_v1.method.wrap_method(
+                self.get_dataset,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_datasets: gapic_v1.method.wrap_method(
+                self.list_datasets,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_dataset: gapic_v1.method.wrap_method(
+                self.delete_dataset,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.CreateDatasetRequest], + Union[gmm_dataset.Dataset, Awaitable[gmm_dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], + Union[gmm_dataset.Dataset, Awaitable[gmm_dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def get_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.GetDatasetRequest], + Union[dataset.Dataset, Awaitable[dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + Union[ + maps_platform_datasets.ListDatasetsResponse, + Awaitable[maps_platform_datasets.ListDatasetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MapsPlatformDatasetsTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc.py new file mode 100644 index 000000000000..671e001ce37f --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc.py @@ -0,0 +1,377 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.maps.mapsplatformdatasets_v1.types import dataset +from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1.types import maps_platform_datasets + +from .base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsTransport + + +class MapsPlatformDatasetsGrpcTransport(MapsPlatformDatasetsTransport): + """gRPC backend transport for MapsPlatformDatasets. + + Service definition for the Maps Platform Datasets API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "mapsplatformdatasets.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_dataset( + self, + ) -> Callable[[maps_platform_datasets.CreateDatasetRequest], gmm_dataset.Dataset]: + r"""Return a callable for the create dataset method over gRPC. + + Create a new dataset for the specified project. + + Returns: + Callable[[~.CreateDatasetRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/CreateDataset", + request_serializer=maps_platform_datasets.CreateDatasetRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["create_dataset"] + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], gmm_dataset.Dataset + ]: + r"""Return a callable for the update dataset metadata method over gRPC. + + Update the metadata for the dataset. + + Returns: + Callable[[~.UpdateDatasetMetadataRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dataset_metadata" not in self._stubs: + self._stubs["update_dataset_metadata"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/UpdateDatasetMetadata", + request_serializer=maps_platform_datasets.UpdateDatasetMetadataRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["update_dataset_metadata"] + + @property + def get_dataset( + self, + ) -> Callable[[maps_platform_datasets.GetDatasetRequest], dataset.Dataset]: + r"""Return a callable for the get dataset method over gRPC. + + Get the dataset. + + Returns: + Callable[[~.GetDatasetRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/GetDataset", + request_serializer=maps_platform_datasets.GetDatasetRequest.serialize, + response_deserializer=dataset.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + maps_platform_datasets.ListDatasetsResponse, + ]: + r"""Return a callable for the list datasets method over gRPC. + + List all the datasets for the specified project. 
+
+        Returns:
+            Callable[[~.ListDatasetsRequest],
+                    ~.ListDatasetsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_datasets" not in self._stubs:
+            self._stubs["list_datasets"] = self.grpc_channel.unary_unary(
+                "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/ListDatasets",
+                request_serializer=maps_platform_datasets.ListDatasetsRequest.serialize,
+                response_deserializer=maps_platform_datasets.ListDatasetsResponse.deserialize,
+            )
+        return self._stubs["list_datasets"]
+
+    @property
+    def delete_dataset(
+        self,
+    ) -> Callable[[maps_platform_datasets.DeleteDatasetRequest], empty_pb2.Empty]:
+        r"""Return a callable for the delete dataset method over gRPC.
+
+        Delete the specified dataset.
+
+        Returns:
+            Callable[[~.DeleteDatasetRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_dataset" not in self._stubs:
+            self._stubs["delete_dataset"] = self.grpc_channel.unary_unary(
+                "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/DeleteDataset",
+                request_serializer=maps_platform_datasets.DeleteDatasetRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_dataset"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("MapsPlatformDatasetsGrpcTransport",)
diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc_asyncio.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..99a4280b0923
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/grpc_asyncio.py
@@ -0,0 +1,383 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
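A minimal usage sketch of the synchronous gRPC transport above, assuming the
google-maps-mapsplatformdatasets package is installed, Application Default
Credentials are available, and ``projects/my-project`` stands in for a real
project:

```python
from google.maps import mapsplatformdatasets_v1

# The client picks the gRPC transport by default and resolves
# credentials from the environment (Application Default Credentials).
client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient()

# "projects/my-project" is a placeholder parent resource.
for ds in client.list_datasets(parent="projects/my-project"):
    print(ds.name, ds.display_name)
```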
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.maps.mapsplatformdatasets_v1.types import dataset
+from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset
+from google.maps.mapsplatformdatasets_v1.types import maps_platform_datasets
+
+from .base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsTransport
+from .grpc import MapsPlatformDatasetsGrpcTransport
+
+
+class MapsPlatformDatasetsGrpcAsyncIOTransport(MapsPlatformDatasetsTransport):
+    """gRPC AsyncIO backend transport for MapsPlatformDatasets.
+
+    Service definition for the Maps Platform Datasets API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "mapsplatformdatasets.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.CreateDatasetRequest], Awaitable[gmm_dataset.Dataset] + ]: + r"""Return a callable for the create dataset method over gRPC. + + Create a new dataset for the specified project. + + Returns: + Callable[[~.CreateDatasetRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/CreateDataset", + request_serializer=maps_platform_datasets.CreateDatasetRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["create_dataset"] + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], + Awaitable[gmm_dataset.Dataset], + ]: + r"""Return a callable for the update dataset metadata method over gRPC. + + Update the metadata for the dataset. + + Returns: + Callable[[~.UpdateDatasetMetadataRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dataset_metadata" not in self._stubs: + self._stubs["update_dataset_metadata"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/UpdateDatasetMetadata", + request_serializer=maps_platform_datasets.UpdateDatasetMetadataRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["update_dataset_metadata"] + + @property + def get_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.GetDatasetRequest], Awaitable[dataset.Dataset] + ]: + r"""Return a callable for the get dataset method over gRPC. + + Get the dataset. + + Returns: + Callable[[~.GetDatasetRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/GetDataset", + request_serializer=maps_platform_datasets.GetDatasetRequest.serialize, + response_deserializer=dataset.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + Awaitable[maps_platform_datasets.ListDatasetsResponse], + ]: + r"""Return a callable for the list datasets method over gRPC. + + List all the datasets for the specified project. + + Returns: + Callable[[~.ListDatasetsRequest], + Awaitable[~.ListDatasetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_datasets" not in self._stubs: + self._stubs["list_datasets"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/ListDatasets", + request_serializer=maps_platform_datasets.ListDatasetsRequest.serialize, + response_deserializer=maps_platform_datasets.ListDatasetsResponse.deserialize, + ) + return self._stubs["list_datasets"] + + @property + def delete_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete dataset method over gRPC. 
+
+        Delete the specified dataset.
+
+        Returns:
+            Callable[[~.DeleteDatasetRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_dataset" not in self._stubs:
+            self._stubs["delete_dataset"] = self.grpc_channel.unary_unary(
+                "/google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets/DeleteDataset",
+                request_serializer=maps_platform_datasets.DeleteDatasetRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_dataset"]
+
+    def close(self):
+        return self.grpc_channel.close()
+
+
+__all__ = ("MapsPlatformDatasetsGrpcAsyncIOTransport",)
diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/rest.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/rest.py
new file mode 100644
index 000000000000..e59caff51dbe
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/services/maps_platform_datasets/transports/rest.py
@@ -0,0 +1,813 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import dataclasses
+import json  # type: ignore
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+from google.protobuf import json_format
+import grpc  # type: ignore
+from requests import __version__ as requests_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+
+from google.protobuf import empty_pb2  # type: ignore
+
+from google.maps.mapsplatformdatasets_v1.types import dataset
+from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset
+from google.maps.mapsplatformdatasets_v1.types import maps_platform_datasets
+
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+from .base import MapsPlatformDatasetsTransport
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=requests_version,
+)
+
+
+class MapsPlatformDatasetsRestInterceptor:
+    """Interceptor for MapsPlatformDatasets.
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MapsPlatformDatasetsRestTransport. + + .. code-block:: python + class MyCustomMapsPlatformDatasetsInterceptor(MapsPlatformDatasetsRestInterceptor): + def pre_create_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dataset(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dataset(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_datasets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_datasets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_dataset_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_dataset_metadata(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MapsPlatformDatasetsRestTransport(interceptor=MyCustomMapsPlatformDatasetsInterceptor()) + client = MapsPlatformDatasetsClient(transport=transport) + + + """ + + def pre_create_dataset( + self, + request: maps_platform_datasets.CreateDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.CreateDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasets server. + """ + return request, metadata + + def post_create_dataset(self, response: gmm_dataset.Dataset) -> gmm_dataset.Dataset: + """Post-rpc interceptor for create_dataset + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasets server but before + it is returned to user code. + """ + return response + + def pre_delete_dataset( + self, + request: maps_platform_datasets.DeleteDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.DeleteDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasets server. + """ + return request, metadata + + def pre_get_dataset( + self, + request: maps_platform_datasets.GetDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.GetDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasets server. 
+ """ + return request, metadata + + def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset: + """Post-rpc interceptor for get_dataset + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasets server but before + it is returned to user code. + """ + return response + + def pre_list_datasets( + self, + request: maps_platform_datasets.ListDatasetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.ListDatasetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_datasets + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasets server. + """ + return request, metadata + + def post_list_datasets( + self, response: maps_platform_datasets.ListDatasetsResponse + ) -> maps_platform_datasets.ListDatasetsResponse: + """Post-rpc interceptor for list_datasets + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasets server but before + it is returned to user code. + """ + return response + + def pre_update_dataset_metadata( + self, + request: maps_platform_datasets.UpdateDatasetMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + maps_platform_datasets.UpdateDatasetMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_dataset_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasets server. + """ + return request, metadata + + def post_update_dataset_metadata( + self, response: gmm_dataset.Dataset + ) -> gmm_dataset.Dataset: + """Post-rpc interceptor for update_dataset_metadata + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasets server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MapsPlatformDatasetsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MapsPlatformDatasetsRestInterceptor + + +class MapsPlatformDatasetsRestTransport(MapsPlatformDatasetsTransport): + """REST backend transport for MapsPlatformDatasets. + + Service definition for the Maps Platform Datasets API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MapsPlatformDatasetsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or MapsPlatformDatasetsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateDataset(MapsPlatformDatasetsRestStub):
+        def __hash__(self):
+            return hash("CreateDataset")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: maps_platform_datasets.CreateDatasetRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gmm_dataset.Dataset:
+            r"""Call the create dataset method over HTTP.
+
+            Args:
+                request (~.maps_platform_datasets.CreateDatasetRequest):
+                    The request object. Request to create a maps dataset.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gmm_dataset.Dataset:
+                    A representation of a Maps Dataset
+                resource.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/datasets", + "body": "dataset", + }, + ] + request, metadata = self._interceptor.pre_create_dataset(request, metadata) + pb_request = maps_platform_datasets.CreateDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gmm_dataset.Dataset() + pb_resp = gmm_dataset.Dataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_dataset(resp) + return resp + + class _DeleteDataset(MapsPlatformDatasetsRestStub): + def __hash__(self): + return hash("DeleteDataset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.DeleteDatasetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete dataset method over HTTP. + + Args: + request (~.maps_platform_datasets.DeleteDatasetRequest): + The request object. Request to delete a dataset. + The dataset to be deleted. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/datasets/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_dataset(request, metadata) + pb_request = maps_platform_datasets.DeleteDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDataset(MapsPlatformDatasetsRestStub): + def __hash__(self): + return hash("GetDataset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.GetDatasetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Call the get dataset method over HTTP. + + Args: + request (~.maps_platform_datasets.GetDatasetRequest): + The request object. Request to get the specified dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.Dataset: + A representation of a Maps Dataset + resource. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/datasets/*}", + }, + ] + request, metadata = self._interceptor.pre_get_dataset(request, metadata) + pb_request = maps_platform_datasets.GetDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dataset.Dataset() + pb_resp = dataset.Dataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dataset(resp) + return resp + + class _ListDatasets(MapsPlatformDatasetsRestStub): + def __hash__(self): + return hash("ListDatasets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.ListDatasetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> maps_platform_datasets.ListDatasetsResponse: + r"""Call the list datasets method over HTTP. + + Args: + request (~.maps_platform_datasets.ListDatasetsRequest): + The request object. Request to list datasets for the + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.maps_platform_datasets.ListDatasetsResponse: + Response to list datasets for the + project. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/datasets", + }, + ] + request, metadata = self._interceptor.pre_list_datasets(request, metadata) + pb_request = maps_platform_datasets.ListDatasetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = maps_platform_datasets.ListDatasetsResponse() + pb_resp = maps_platform_datasets.ListDatasetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_datasets(resp) + return resp + + class _UpdateDatasetMetadata(MapsPlatformDatasetsRestStub): + def __hash__(self): + return hash("UpdateDatasetMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.UpdateDatasetMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Call the update dataset metadata method over HTTP. + + Args: + request (~.maps_platform_datasets.UpdateDatasetMetadataRequest): + The request object. Request to update the metadata fields + of the dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gmm_dataset.Dataset: + A representation of a Maps Dataset + resource. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{dataset.name=projects/*/datasets/*}", + "body": "dataset", + }, + ] + request, metadata = self._interceptor.pre_update_dataset_metadata( + request, metadata + ) + pb_request = maps_platform_datasets.UpdateDatasetMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gmm_dataset.Dataset() + pb_resp = gmm_dataset.Dataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_dataset_metadata(resp) + return resp + + @property + def create_dataset( + self, + ) -> Callable[[maps_platform_datasets.CreateDatasetRequest], gmm_dataset.Dataset]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dataset( + self, + ) -> Callable[[maps_platform_datasets.DeleteDatasetRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dataset( + self, + ) -> Callable[[maps_platform_datasets.GetDatasetRequest], dataset.Dataset]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + maps_platform_datasets.ListDatasetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatasets(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], gmm_dataset.Dataset + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatasetMetadata(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MapsPlatformDatasetsRestTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/__init__.py new file mode 100644 index 000000000000..798ce114b732 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
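+#
+# This package re-exports the proto-plus message and enum types generated for
+# the google.maps.mapsplatformdatasets.v1 API, so they can be imported from a
+# single module (see ``__all__`` below).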
+# +from .data_source import FileFormat, GcsSource, LocalFileSource +from .dataset import Dataset, Status, Usage +from .maps_platform_datasets import ( + CreateDatasetRequest, + DeleteDatasetRequest, + GetDatasetRequest, + ListDatasetsRequest, + ListDatasetsResponse, + UpdateDatasetMetadataRequest, +) + +__all__ = ( + "GcsSource", + "LocalFileSource", + "FileFormat", + "Dataset", + "Status", + "Usage", + "CreateDatasetRequest", + "DeleteDatasetRequest", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "UpdateDatasetMetadataRequest", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/data_source.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/data_source.py new file mode 100644 index 000000000000..57d6c355bdfa --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/data_source.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1", + manifest={ + "FileFormat", + "LocalFileSource", + "GcsSource", + }, +) + + +class FileFormat(proto.Enum): + r"""The format of the file being uploaded. + + Values: + FILE_FORMAT_UNSPECIFIED (0): + Unspecified file format. + FILE_FORMAT_GEOJSON (1): + GeoJson file. + FILE_FORMAT_KML (2): + KML file. + FILE_FORMAT_CSV (3): + CSV file. + """ + FILE_FORMAT_UNSPECIFIED = 0 + FILE_FORMAT_GEOJSON = 1 + FILE_FORMAT_KML = 2 + FILE_FORMAT_CSV = 3 + + +class LocalFileSource(proto.Message): + r"""The details about the data source when it is a local file. + + Attributes: + filename (str): + The file name of the uploaded file. + file_format (google.maps.mapsplatformdatasets_v1.types.FileFormat): + The format of the file that is being + uploaded. + """ + + filename: str = proto.Field( + proto.STRING, + number=1, + ) + file_format: "FileFormat" = proto.Field( + proto.ENUM, + number=2, + enum="FileFormat", + ) + + +class GcsSource(proto.Message): + r"""The details about the data source when it is in Google Cloud + Storage. + + Attributes: + input_uri (str): + Source data URI. For example, ``gs://my_bucket/my_object``. + file_format (google.maps.mapsplatformdatasets_v1.types.FileFormat): + The file format of the Google Cloud Storage + object. This is used mainly for validation. 
+ """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_format: "FileFormat" = proto.Field( + proto.ENUM, + number=2, + enum="FileFormat", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/dataset.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/dataset.py new file mode 100644 index 000000000000..e69491aa2ec8 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/dataset.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.maps.mapsplatformdatasets_v1.types import data_source + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1", + manifest={ + "Usage", + "Dataset", + "Status", + }, +) + + +class Usage(proto.Enum): + r"""Usage specifies where the data is intended to be used to + inform how to process the data. + + Values: + USAGE_UNSPECIFIED (0): + The usage of this dataset is not set. + USAGE_DATA_DRIVEN_STYLING (1): + This dataset will be used for data driven + styling. + """ + USAGE_UNSPECIFIED = 0 + USAGE_DATA_DRIVEN_STYLING = 1 + + +class Dataset(proto.Message): + r"""A representation of a Maps Dataset resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Resource name, projects/{project}/datasets/{dataset_id} + display_name (str): + Human readable name, shown in the console UI + . + description (str): + A description of this dataset . + version_id (str): + The version ID of the dataset. + usage (MutableSequence[google.maps.mapsplatformdatasets_v1.types.Usage]): + Specified use case for this dataset. + local_file_source (google.maps.mapsplatformdatasets_v1.types.LocalFileSource): + A local file source for the dataset for a + single upload. + + This field is a member of `oneof`_ ``data_source``. + gcs_source (google.maps.mapsplatformdatasets_v1.types.GcsSource): + A Google Cloud Storage file source for the + dataset for a single upload. + + This field is a member of `oneof`_ ``data_source``. + status (google.maps.mapsplatformdatasets_v1.types.Status): + Output only. The status of this dataset + version. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the dataset was first + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the dataset metadata + was last updated. 
+ version_create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Time when the data was uploaded.
+ version_description (str):
+ Output only. The description for this version
+ of the dataset. It is provided when importing
+ data to the dataset.
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ display_name: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ description: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ version_id: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ usage: MutableSequence["Usage"] = proto.RepeatedField(
+ proto.ENUM,
+ number=5,
+ enum="Usage",
+ )
+ local_file_source: data_source.LocalFileSource = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="data_source",
+ message=data_source.LocalFileSource,
+ )
+ gcs_source: data_source.GcsSource = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="data_source",
+ message=data_source.GcsSource,
+ )
+ status: "Status" = proto.Field(
+ proto.MESSAGE,
+ number=12,
+ message="Status",
+ )
+ create_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=8,
+ message=timestamp_pb2.Timestamp,
+ )
+ update_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=9,
+ message=timestamp_pb2.Timestamp,
+ )
+ version_create_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=10,
+ message=timestamp_pb2.Timestamp,
+ )
+ version_description: str = proto.Field(
+ proto.STRING,
+ number=11,
+ )
+
+
+class Status(proto.Message):
+ r"""Status of the dataset.
+
+ Attributes:
+ state (google.maps.mapsplatformdatasets_v1.types.Status.State):
+ State enum for status.
+ error_message (str):
+ Error message indicating the reason for
+ failure. It is empty if the dataset is not in a
+ failed state.
+ """
+
+ class State(proto.Enum):
+ r"""A list of states for the dataset.
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ The state of this dataset is not set.
+ STATE_IMPORTING (1):
+ Data is being imported to a dataset.
+ STATE_IMPORT_SUCCEEDED (2):
+ Importing data to a dataset succeeded.
+ STATE_IMPORT_FAILED (3):
+ Importing data to a dataset failed.
+ STATE_DELETING (4):
+ The dataset is in the process of getting
+ deleted.
+ STATE_DELETION_FAILED (5):
+ The deletion failed state. This state
+ represents that dataset deletion has failed.
+ Deletion may be retried.
+ STATE_PROCESSING (6):
+ Data is being processed.
+ STATE_PROCESSING_FAILED (7):
+ The processing failed state. This state
+ represents that processing has failed and may
+ report errors.
+ STATE_NEEDS_REVIEW (8):
+ This state is currently not used.
+ STATE_PUBLISHING (9):
+ The publishing state. This state represents
+ that publishing is in progress.
+ STATE_PUBLISHING_FAILED (10):
+ The publishing failed state. This state
+ represents that publishing failed. Publishing
+ may be retried.
+ STATE_COMPLETED (11):
+ The completed state. This state represents
+ the dataset being available for its specific
+ usage.
+ """ + STATE_UNSPECIFIED = 0 + STATE_IMPORTING = 1 + STATE_IMPORT_SUCCEEDED = 2 + STATE_IMPORT_FAILED = 3 + STATE_DELETING = 4 + STATE_DELETION_FAILED = 5 + STATE_PROCESSING = 6 + STATE_PROCESSING_FAILED = 7 + STATE_NEEDS_REVIEW = 8 + STATE_PUBLISHING = 9 + STATE_PUBLISHING_FAILED = 10 + STATE_COMPLETED = 11 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets.py new file mode 100644 index 000000000000..948e954a37f7 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1", + manifest={ + "CreateDatasetRequest", + "UpdateDatasetMetadataRequest", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "DeleteDatasetRequest", + }, +) + + +class CreateDatasetRequest(proto.Message): + r"""Request to create a maps dataset. + + Attributes: + parent (str): + Required. Parent project that will own the + dataset. Format: projects/{$project} + dataset (google.maps.mapsplatformdatasets_v1.types.Dataset): + Required. The dataset version to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + dataset: gmm_dataset.Dataset = proto.Field( + proto.MESSAGE, + number=2, + message=gmm_dataset.Dataset, + ) + + +class UpdateDatasetMetadataRequest(proto.Message): + r"""Request to update the metadata fields of the dataset. + + Attributes: + dataset (google.maps.mapsplatformdatasets_v1.types.Dataset): + Required. The dataset to update. The dataset's name is used + to identify the dataset to be updated. The name has the + format: projects/{project}/datasets/{dataset_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. Support the value "*" for + full replacement. + """ + + dataset: gmm_dataset.Dataset = proto.Field( + proto.MESSAGE, + number=1, + message=gmm_dataset.Dataset, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetDatasetRequest(proto.Message): + r"""Request to get the specified dataset. + + Attributes: + name (str): + Required. Resource name. 
+ projects/{project}/datasets/{dataset_id}
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class ListDatasetsRequest(proto.Message):
+ r"""Request to list datasets for the project.
+
+ Attributes:
+ parent (str):
+ Required. The name of the project to list all
+ the datasets for.
+ page_size (int):
+ The maximum number of datasets to return per
+ page. If unspecified (or zero), all datasets
+ will be returned.
+ page_token (str):
+ The page token, received from a previous
+ ListDatasets call. Provide this to retrieve the
+ subsequent page.
+ """
+
+ parent: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ page_size: int = proto.Field(
+ proto.INT32,
+ number=2,
+ )
+ page_token: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+
+class ListDatasetsResponse(proto.Message):
+ r"""Response to list datasets for the project.
+
+ Attributes:
+ datasets (MutableSequence[google.maps.mapsplatformdatasets_v1.types.Dataset]):
+ All the datasets for the project.
+ next_page_token (str):
+ A token that can be sent as ``page_token`` to retrieve the
+ next page. If this field is omitted, there are no subsequent
+ pages.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ datasets: MutableSequence[gmm_dataset.Dataset] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message=gmm_dataset.Dataset,
+ )
+ next_page_token: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class DeleteDatasetRequest(proto.Message):
+ r"""Request to delete a dataset.
+
+ Attributes:
+ name (str):
+ Required. Format: projects/{project}/datasets/{dataset_id}
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets_service.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets_service.py
new file mode 100644
index 000000000000..4e9d8c7af1e4
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1/types/maps_platform_datasets_service.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
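+#
+# This module is a generated placeholder: the service's request and response
+# messages live in maps_platform_datasets.py, so the proto manifest below is
+# empty.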
+# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/__init__.py new file mode 100644 index 000000000000..744eb69d8e50 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.maps_platform_datasets_v1_alpha import ( + MapsPlatformDatasetsV1AlphaAsyncClient, + MapsPlatformDatasetsV1AlphaClient, +) +from .types.data_source import FileFormat, GcsSource, LocalFileSource +from .types.dataset import Dataset, State, Usage +from .types.maps_platform_datasets import ( + CreateDatasetRequest, + DeleteDatasetRequest, + DeleteDatasetVersionRequest, + GetDatasetRequest, + ListDatasetsRequest, + ListDatasetsResponse, + ListDatasetVersionsRequest, + ListDatasetVersionsResponse, + UpdateDatasetMetadataRequest, +) + +__all__ = ( + "MapsPlatformDatasetsV1AlphaAsyncClient", + "CreateDatasetRequest", + "Dataset", + "DeleteDatasetRequest", + "DeleteDatasetVersionRequest", + "FileFormat", + "GcsSource", + "GetDatasetRequest", + "ListDatasetVersionsRequest", + "ListDatasetVersionsResponse", + "ListDatasetsRequest", + "ListDatasetsResponse", + "LocalFileSource", + "MapsPlatformDatasetsV1AlphaClient", + "State", + "UpdateDatasetMetadataRequest", + "Usage", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_metadata.json b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..63cd53b2c8d7 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.maps.mapsplatformdatasets_v1alpha", + "protoPackage": "google.maps.mapsplatformdatasets.v1alpha", + "schema": "1.0", + "services": { + "MapsPlatformDatasetsV1Alpha": { + "clients": { + "grpc": { + "libraryClient": "MapsPlatformDatasetsV1AlphaClient", + "rpcs": { + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, + "DeleteDatasetVersion": { + "methods": [ + "delete_dataset_version" + ] + }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, + "ListDatasetVersions": { + "methods": [ + "list_dataset_versions" + ] + }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + 
"UpdateDatasetMetadata": { + "methods": [ + "update_dataset_metadata" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MapsPlatformDatasetsV1AlphaAsyncClient", + "rpcs": { + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, + "DeleteDatasetVersion": { + "methods": [ + "delete_dataset_version" + ] + }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, + "ListDatasetVersions": { + "methods": [ + "list_dataset_versions" + ] + }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "UpdateDatasetMetadata": { + "methods": [ + "update_dataset_metadata" + ] + } + } + }, + "rest": { + "libraryClient": "MapsPlatformDatasetsV1AlphaClient", + "rpcs": { + "CreateDataset": { + "methods": [ + "create_dataset" + ] + }, + "DeleteDataset": { + "methods": [ + "delete_dataset" + ] + }, + "DeleteDatasetVersion": { + "methods": [ + "delete_dataset_version" + ] + }, + "GetDataset": { + "methods": [ + "get_dataset" + ] + }, + "ListDatasetVersions": { + "methods": [ + "list_dataset_versions" + ] + }, + "ListDatasets": { + "methods": [ + "list_datasets" + ] + }, + "UpdateDatasetMetadata": { + "methods": [ + "update_dataset_metadata" + ] + } + } + } + } + } + } +} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_version.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_version.py new file mode 100644 index 000000000000..a6ccd663d16f --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.3.0" # {x-release-please-version} diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/py.typed b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/py.typed new file mode 100644 index 000000000000..b186faa8f14d --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-mapsplatformdatasets package uses inline types. diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/__init__.py new file mode 100644 index 000000000000..840add9451b2 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MapsPlatformDatasetsV1AlphaAsyncClient +from .client import MapsPlatformDatasetsV1AlphaClient + +__all__ = ( + "MapsPlatformDatasetsV1AlphaClient", + "MapsPlatformDatasetsV1AlphaAsyncClient", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/async_client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/async_client.py new file mode 100644 index 000000000000..02c4ca937d13 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/async_client.py @@ -0,0 +1,1028 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
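+#
+# The async client below is a thin wrapper around the synchronous
+# MapsPlatformDatasetsV1AlphaClient: it delegates configuration to that client
+# and awaits RPCs over the "grpc_asyncio" transport by default.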
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha import ( + pagers, +) +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets +from google.maps.mapsplatformdatasets_v1alpha.types import data_source +from google.maps.mapsplatformdatasets_v1alpha.types import dataset + +from .client import MapsPlatformDatasetsV1AlphaClient +from .transports.base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport +from .transports.grpc_asyncio import MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport + + +class MapsPlatformDatasetsV1AlphaAsyncClient: + """Service definition for the Maps Platform Datasets API.""" + + _client: MapsPlatformDatasetsV1AlphaClient + + DEFAULT_ENDPOINT = MapsPlatformDatasetsV1AlphaClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MapsPlatformDatasetsV1AlphaClient.DEFAULT_MTLS_ENDPOINT + + dataset_path = staticmethod(MapsPlatformDatasetsV1AlphaClient.dataset_path) + parse_dataset_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.parse_dataset_path + ) + common_billing_account_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.parse_common_organization_path + ) + common_project_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.common_project_path + ) + parse_common_project_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.parse_common_project_path + ) + common_location_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.common_location_path + ) + parse_common_location_path = staticmethod( + MapsPlatformDatasetsV1AlphaClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+
+ Returns:
+ MapsPlatformDatasetsV1AlphaAsyncClient: The constructed client.
+ """
+ return MapsPlatformDatasetsV1AlphaClient.from_service_account_info.__func__(MapsPlatformDatasetsV1AlphaAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ MapsPlatformDatasetsV1AlphaAsyncClient: The constructed client.
+ """
+ return MapsPlatformDatasetsV1AlphaClient.from_service_account_file.__func__(MapsPlatformDatasetsV1AlphaAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ return MapsPlatformDatasetsV1AlphaClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+ @property
+ def transport(self) -> MapsPlatformDatasetsV1AlphaTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ MapsPlatformDatasetsV1AlphaTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(MapsPlatformDatasetsV1AlphaClient).get_transport_class,
+ type(MapsPlatformDatasetsV1AlphaClient),
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, MapsPlatformDatasetsV1AlphaTransport] = "grpc_asyncio",
+ client_options: Optional[ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the maps platform datasets v1 alpha client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests.
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MapsPlatformDatasetsV1AlphaTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MapsPlatformDatasetsV1AlphaClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_dataset( + self, + request: Optional[ + Union[maps_platform_datasets.CreateDatasetRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dataset: Optional[gmm_dataset.Dataset] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Create a new dataset for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + async def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest, dict]]): + The request object. Request to create a maps dataset. + parent (:class:`str`): + Required. Parent project that will own the dataset. + Format: projects/{$project_number} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (:class:`google.maps.mapsplatformdatasets_v1alpha.types.Dataset`): + Required. The dataset version to + create. 
+ + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.types.Dataset: + A representation of a maps platform + dataset. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.CreateDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dataset, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_dataset_metadata( + self, + request: Optional[ + Union[maps_platform_datasets.UpdateDatasetMetadataRequest, dict] + ] = None, + *, + dataset: Optional[gmm_dataset.Dataset] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Update the metadata for the dataset. To update the + data use: UploadDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + async def sample_update_dataset_metadata(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest( + ) + + # Make the request + response = await client.update_dataset_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest, dict]]): + The request object. Request to update the metadata fields + of the dataset. + dataset (:class:`google.maps.mapsplatformdatasets_v1alpha.types.Dataset`): + Required. The dataset to update. 
The dataset's name is + used to identify the dataset to be updated. The name has + the format: projects/{project}/datasets/{dataset_id} + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to be updated. Support the value "*" + for full replacement. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.types.Dataset: + A representation of a maps platform + dataset. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.UpdateDatasetMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dataset is not None: + request.dataset = dataset + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_dataset_metadata, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dataset.name", request.dataset.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dataset( + self, + request: Optional[Union[maps_platform_datasets.GetDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Get the published or latest version of the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + async def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest, dict]]): + The request object. Request to get the specified dataset. + name (:class:`str`): + Required. Resource name. Can also fetch a specified + version projects/{project}/datasets/{dataset_id} + projects/{project}/datasets/{dataset_id}@{version-id} + + In order to retrieve a previous version of the dataset, + also provide the version ID. Example: + projects/123/datasets/assisted-driving-preferences@c7cfa2a8 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.types.Dataset: + A representation of a maps platform + dataset. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.GetDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dataset, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_dataset_versions( + self, + request: Optional[ + Union[maps_platform_datasets.ListDatasetVersionsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetVersionsAsyncPager: + r"""List all the versions of a dataset. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + async def sample_list_dataset_versions(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_dataset_versions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest, dict]]): + The request object. Request to list of all versions of + the dataset. + name (:class:`str`): + Required. The name of the dataset to + list all the versions for. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsAsyncPager: + Response with list of all versions of + the dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.ListDatasetVersionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dataset_versions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatasetVersionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_datasets( + self, + request: Optional[ + Union[maps_platform_datasets.ListDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetsAsyncPager: + r"""List all the datasets for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + async def sample_list_datasets(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_datasets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest, dict]]): + The request object. Request to list datasets for the + project. + parent (:class:`str`): + Required. The name of the project to + list all the datasets for. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsAsyncPager: + Response to list datasets for the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.ListDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_datasets, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatasetsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dataset( + self, + request: Optional[ + Union[maps_platform_datasets.DeleteDatasetRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete the specified dataset and optionally all its + corresponding versions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + async def sample_delete_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + await client.delete_dataset(request=request) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest, dict]]): + The request object. Request to delete a dataset. + The dataset to be deleted. + name (:class:`str`): + Required. Format: + projects/${project}/datasets/{dataset_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.DeleteDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dataset, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def delete_dataset_version( + self, + request: Optional[ + Union[maps_platform_datasets.DeleteDatasetVersionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a specific version of the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + async def sample_delete_dataset_version(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( + name="name_value", + ) + + # Make the request + await client.delete_dataset_version(request=request) + + Args: + request (Optional[Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest, dict]]): + The request object. Request to delete a version of a + dataset. + name (:class:`str`): + Required. Format: + projects/${project}/datasets/{dataset_id}@{version-id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = maps_platform_datasets.DeleteDatasetVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dataset_version, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MapsPlatformDatasetsV1AlphaAsyncClient",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/client.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/client.py new file mode 100644 index 000000000000..8d498661ba4f --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/client.py @@ -0,0 +1,1227 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha import ( + pagers, +) +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets +from google.maps.mapsplatformdatasets_v1alpha.types import data_source +from google.maps.mapsplatformdatasets_v1alpha.types import dataset + +from .transports.base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport +from .transports.grpc import MapsPlatformDatasetsV1AlphaGrpcTransport +from .transports.grpc_asyncio import MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport +from .transports.rest import MapsPlatformDatasetsV1AlphaRestTransport + + +class MapsPlatformDatasetsV1AlphaClientMeta(type): + """Metaclass for the MapsPlatformDatasetsV1Alpha client. 
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[MapsPlatformDatasetsV1AlphaTransport]]
+    _transport_registry["grpc"] = MapsPlatformDatasetsV1AlphaGrpcTransport
+    _transport_registry[
+        "grpc_asyncio"
+    ] = MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport
+    _transport_registry["rest"] = MapsPlatformDatasetsV1AlphaRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[MapsPlatformDatasetsV1AlphaTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class MapsPlatformDatasetsV1AlphaClient(
+    metaclass=MapsPlatformDatasetsV1AlphaClientMeta
+):
+    """Service definition for the Maps Platform Datasets API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "mapsplatformdatasets.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MapsPlatformDatasetsV1AlphaClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MapsPlatformDatasetsV1AlphaClient: The constructed client.
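+
+        A minimal usage sketch (editorial note; the key-file path is
+        illustrative, not part of the generated code):
+
+        .. code-block:: python
+
+            client = MapsPlatformDatasetsV1AlphaClient.from_service_account_file(
+                "service-account.json"
+            )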
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MapsPlatformDatasetsV1AlphaTransport: + """Returns the transport used by the client instance. + + Returns: + MapsPlatformDatasetsV1AlphaTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def dataset_path( + project: str, + dataset: str, + ) -> str: + """Returns a fully-qualified dataset string.""" + return "projects/{project}/datasets/{dataset}".format( + project=project, + dataset=dataset, + ) + + @staticmethod + def parse_dataset_path(path: str) -> Dict[str, str]: + """Parses a dataset path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, MapsPlatformDatasetsV1AlphaTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the maps platform datasets v1 alpha client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, MapsPlatformDatasetsV1AlphaTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MapsPlatformDatasetsV1AlphaTransport): + # transport is a MapsPlatformDatasetsV1AlphaTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_dataset( + self, + request: Optional[ + Union[maps_platform_datasets.CreateDatasetRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dataset: Optional[gmm_dataset.Dataset] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Create a new dataset for the specified project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest, dict]): + The request object. Request to create a maps dataset. + parent (str): + Required. Parent project that will own the dataset. + Format: projects/{$project_number} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): + Required. The dataset version to + create. + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.types.Dataset: + A representation of a maps platform + dataset. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.CreateDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.CreateDatasetRequest): + request = maps_platform_datasets.CreateDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
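+        # NOTE (editorial): `response` is a `Dataset` message; the
+        # server-assigned resource name is available as `response.name`
+        # (format: projects/{project}/datasets/{dataset_id}, per the
+        # docstrings above).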
+ return response + + def update_dataset_metadata( + self, + request: Optional[ + Union[maps_platform_datasets.UpdateDatasetMetadataRequest, dict] + ] = None, + *, + dataset: Optional[gmm_dataset.Dataset] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Update the metadata for the dataset. To update the + data use: UploadDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + def sample_update_dataset_metadata(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest( + ) + + # Make the request + response = client.update_dataset_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest, dict]): + The request object. Request to update the metadata fields + of the dataset. + dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): + Required. The dataset to update. The dataset's name is + used to identify the dataset to be updated. The name has + the format: projects/{project}/datasets/{dataset_id} + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. Support the value "*" + for full replacement. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.types.Dataset: + A representation of a maps platform + dataset. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.UpdateDatasetMetadataRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.UpdateDatasetMetadataRequest): + request = maps_platform_datasets.UpdateDatasetMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
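+        # NOTE (editorial): `update_mask` selects which fields of `dataset`
+        # are written; per the docstring above, a hypothetical caller could
+        # pass field_mask_pb2.FieldMask(paths=["description"]) for a partial
+        # update, or paths=["*"] for a full replacement.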
+ if dataset is not None: + request.dataset = dataset + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_dataset_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dataset.name", request.dataset.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dataset( + self, + request: Optional[Union[maps_platform_datasets.GetDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Get the published or latest version of the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest, dict]): + The request object. Request to get the specified dataset. + name (str): + Required. Resource name. Can also fetch a specified + version projects/{project}/datasets/{dataset_id} + projects/{project}/datasets/{dataset_id}@{version-id} + + In order to retrieve a previous version of the dataset, + also provide the version ID. Example: + projects/123/datasets/assisted-driving-preferences@c7cfa2a8 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.types.Dataset: + A representation of a maps platform + dataset. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.GetDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
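+        # NOTE (editorial): `request` may also arrive as a plain dict, e.g.
+        # {"name": "projects/123/datasets/some-dataset"} (an illustrative
+        # value); the constructor below coerces it into a GetDatasetRequest.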
+ if not isinstance(request, maps_platform_datasets.GetDatasetRequest): + request = maps_platform_datasets.GetDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_dataset_versions( + self, + request: Optional[ + Union[maps_platform_datasets.ListDatasetVersionsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetVersionsPager: + r"""List all the versions of a dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + def sample_list_dataset_versions(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_dataset_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest, dict]): + The request object. Request to list of all versions of + the dataset. + name (str): + Required. The name of the dataset to + list all the versions for. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsPager: + Response with list of all versions of + the dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.ListDatasetVersionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.ListDatasetVersionsRequest): + request = maps_platform_datasets.ListDatasetVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dataset_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatasetVersionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_datasets( + self, + request: Optional[ + Union[maps_platform_datasets.ListDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetsPager: + r"""List all the datasets for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + def sample_list_datasets(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest, dict]): + The request object. Request to list datasets for the + project. + parent (str): + Required. The name of the project to + list all the datasets for. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsPager: + Response to list datasets for the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
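+        # NOTE (editorial): as in the other methods, the flattened `parent`
+        # argument and an explicit `request` are mutually exclusive; passing
+        # both raises the ValueError below.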
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.ListDatasetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.ListDatasetsRequest): + request = maps_platform_datasets.ListDatasetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_datasets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatasetsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dataset( + self, + request: Optional[ + Union[maps_platform_datasets.DeleteDatasetRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete the specified dataset and optionally all its + corresponding versions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + def sample_delete_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + client.delete_dataset(request=request) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest, dict]): + The request object. Request to delete a dataset. + The dataset to be deleted. + name (str): + Required. Format: + projects/${project}/datasets/{dataset_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.DeleteDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.DeleteDatasetRequest): + request = maps_platform_datasets.DeleteDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def delete_dataset_version( + self, + request: Optional[ + Union[maps_platform_datasets.DeleteDatasetVersionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a specific version of the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import mapsplatformdatasets_v1alpha + + def sample_delete_dataset_version(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( + name="name_value", + ) + + # Make the request + client.delete_dataset_version(request=request) + + Args: + request (Union[google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest, dict]): + The request object. Request to delete a version of a + dataset. + name (str): + Required. Format: + projects/${project}/datasets/{dataset_id}@{version-id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a maps_platform_datasets.DeleteDatasetVersionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, maps_platform_datasets.DeleteDatasetVersionRequest): + request = maps_platform_datasets.DeleteDatasetVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dataset_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "MapsPlatformDatasetsV1AlphaClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MapsPlatformDatasetsV1AlphaClient",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/pagers.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/pagers.py new file mode 100644 index 000000000000..860ed372816f --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/pagers.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.maps.mapsplatformdatasets_v1alpha.types import ( + dataset, + maps_platform_datasets, +) + + +class ListDatasetVersionsPager: + """A pager for iterating through ``list_dataset_versions`` requests. + + This class thinly wraps an initial + :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatasetVersions`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. 
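+
+    A minimal usage sketch (editorial; the dataset name is illustrative):
+
+    .. code-block:: python
+
+        pager = client.list_dataset_versions(name="projects/123/datasets/my-dataset")
+        for version in pager:
+            print(version.name)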
+ + All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., maps_platform_datasets.ListDatasetVersionsResponse], + request: maps_platform_datasets.ListDatasetVersionsRequest, + response: maps_platform_datasets.ListDatasetVersionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest): + The initial request object. + response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = maps_platform_datasets.ListDatasetVersionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[maps_platform_datasets.ListDatasetVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dataset.Dataset]: + for page in self.pages: + yield from page.datasets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatasetVersionsAsyncPager: + """A pager for iterating through ``list_dataset_versions`` requests. + + This class thinly wraps an initial + :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatasetVersions`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[maps_platform_datasets.ListDatasetVersionsResponse] + ], + request: maps_platform_datasets.ListDatasetVersionsRequest, + response: maps_platform_datasets.ListDatasetVersionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest): + The initial request object. + response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
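+
+        Note (editorial sketch; the client variable is illustrative): this
+        pager is normally obtained by awaiting the async client call rather
+        than being constructed directly:
+
+        .. code-block:: python
+
+            pager = await client.list_dataset_versions(name="projects/123/datasets/my-dataset")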
+ """ + self._method = method + self._request = maps_platform_datasets.ListDatasetVersionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[maps_platform_datasets.ListDatasetVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dataset.Dataset]: + async def async_generator(): + async for page in self.pages: + for response in page.datasets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatasetsPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., maps_platform_datasets.ListDatasetsResponse], + request: maps_platform_datasets.ListDatasetsRequest, + response: maps_platform_datasets.ListDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest): + The initial request object. + response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = maps_platform_datasets.ListDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[maps_platform_datasets.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dataset.Dataset]: + for page in self.pages: + yield from page.datasets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatasetsAsyncPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``datasets`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[maps_platform_datasets.ListDatasetsResponse]], + request: maps_platform_datasets.ListDatasetsRequest, + response: maps_platform_datasets.ListDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest): + The initial request object. + response (google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = maps_platform_datasets.ListDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[maps_platform_datasets.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dataset.Dataset]: + async def async_generator(): + async for page in self.pages: + for response in page.datasets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/__init__.py new file mode 100644 index 000000000000..9afe352f0f4b --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MapsPlatformDatasetsV1AlphaTransport +from .grpc import MapsPlatformDatasetsV1AlphaGrpcTransport +from .grpc_asyncio import MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport +from .rest import ( + MapsPlatformDatasetsV1AlphaRestInterceptor, + MapsPlatformDatasetsV1AlphaRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[MapsPlatformDatasetsV1AlphaTransport]] +_transport_registry["grpc"] = MapsPlatformDatasetsV1AlphaGrpcTransport +_transport_registry["grpc_asyncio"] = MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport +_transport_registry["rest"] = MapsPlatformDatasetsV1AlphaRestTransport + +__all__ = ( + "MapsPlatformDatasetsV1AlphaTransport", + "MapsPlatformDatasetsV1AlphaGrpcTransport", + "MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport", + "MapsPlatformDatasetsV1AlphaRestTransport", + "MapsPlatformDatasetsV1AlphaRestInterceptor", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/base.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/base.py new file mode 100644 index 000000000000..8fe8dba90ec9 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
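A brief sketch of how the registry in the ``__init__.py`` above is typically exercised: the generated client accepts a transport name and resolves it through this ``_transport_registry``. The constructor usage below follows the standard GAPIC pattern rather than anything specific to this diff.

.. code-block:: python

    from google.maps import mapsplatformdatasets_v1alpha

    # "rest" is looked up in _transport_registry; omitting the argument
    # selects the gRPC transport by default.
    client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient(
        transport="rest",
    )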
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha import gapic_version as package_version +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets +from google.maps.mapsplatformdatasets_v1alpha.types import dataset + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MapsPlatformDatasetsV1AlphaTransport(abc.ABC): + """Abstract transport class for MapsPlatformDatasetsV1Alpha.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "mapsplatformdatasets.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
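+ # (GDCH-aware credentials expose with_gdch_audience; api_audience is applied when given, otherwise the host.)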
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_dataset: gapic_v1.method.wrap_method( + self.create_dataset, + default_timeout=60.0, + client_info=client_info, + ), + self.update_dataset_metadata: gapic_v1.method.wrap_method( + self.update_dataset_metadata, + default_timeout=60.0, + client_info=client_info, + ), + self.get_dataset: gapic_v1.method.wrap_method( + self.get_dataset, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_dataset_versions: gapic_v1.method.wrap_method( + self.list_dataset_versions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_datasets: gapic_v1.method.wrap_method( + self.list_datasets, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_dataset: gapic_v1.method.wrap_method( + self.delete_dataset, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_dataset_version: gapic_v1.method.wrap_method( + self.delete_dataset_version, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.CreateDatasetRequest], + Union[gmm_dataset.Dataset, Awaitable[gmm_dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], + Union[gmm_dataset.Dataset, Awaitable[gmm_dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def get_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.GetDatasetRequest], + Union[dataset.Dataset, Awaitable[dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def list_dataset_versions( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetVersionsRequest], + Union[ + maps_platform_datasets.ListDatasetVersionsResponse, + Awaitable[maps_platform_datasets.ListDatasetVersionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + Union[ + maps_platform_datasets.ListDatasetsResponse, + Awaitable[maps_platform_datasets.ListDatasetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def delete_dataset_version( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetVersionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MapsPlatformDatasetsV1AlphaTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc.py new file mode 100644 index 000000000000..9cdd4537cbf4 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc.py @@ -0,0 +1,436 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
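For reference, a sketch of the retry policy that ``_prep_wrapped_messages`` above attaches to ``get_dataset``, ``list_dataset_versions``, and ``list_datasets``, reconstructed with the public ``google.api_core`` types so a caller can override it per call (the ``request`` object in the final comment is assumed):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Exponential backoff: first delay 1s, growing 1.3x per attempt up to a
    # 10s cap, retrying only ServiceUnavailable, with a 60s overall deadline.
    custom_retry = retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )

    # Hypothetical per-call override:
    # client.get_dataset(request=request, retry=custom_retry, timeout=30.0)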
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets +from google.maps.mapsplatformdatasets_v1alpha.types import dataset + +from .base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport + + +class MapsPlatformDatasetsV1AlphaGrpcTransport(MapsPlatformDatasetsV1AlphaTransport): + """gRPC backend transport for MapsPlatformDatasetsV1Alpha. + + Service definition for the Maps Platform Datasets API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_dataset( + self, + ) -> Callable[[maps_platform_datasets.CreateDatasetRequest], gmm_dataset.Dataset]: + r"""Return a callable for the create dataset method over gRPC. + + Create a new dataset for the specified project. + + Returns: + Callable[[~.CreateDatasetRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/CreateDataset", + request_serializer=maps_platform_datasets.CreateDatasetRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["create_dataset"] + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], gmm_dataset.Dataset + ]: + r"""Return a callable for the update dataset metadata method over gRPC. + + Update the metadata for the dataset. To update the + data use: UploadDataset. + + Returns: + Callable[[~.UpdateDatasetMetadataRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
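+ # Stubs are created lazily on first property access and cached in self._stubs for reuse.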
+ if "update_dataset_metadata" not in self._stubs: + self._stubs["update_dataset_metadata"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/UpdateDatasetMetadata", + request_serializer=maps_platform_datasets.UpdateDatasetMetadataRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["update_dataset_metadata"] + + @property + def get_dataset( + self, + ) -> Callable[[maps_platform_datasets.GetDatasetRequest], dataset.Dataset]: + r"""Return a callable for the get dataset method over gRPC. + + Get the published or latest version of the dataset. + + Returns: + Callable[[~.GetDatasetRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/GetDataset", + request_serializer=maps_platform_datasets.GetDatasetRequest.serialize, + response_deserializer=dataset.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_dataset_versions( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetVersionsRequest], + maps_platform_datasets.ListDatasetVersionsResponse, + ]: + r"""Return a callable for the list dataset versions method over gRPC. + + List all the versions of a dataset. + + Returns: + Callable[[~.ListDatasetVersionsRequest], + ~.ListDatasetVersionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dataset_versions" not in self._stubs: + self._stubs["list_dataset_versions"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasetVersions", + request_serializer=maps_platform_datasets.ListDatasetVersionsRequest.serialize, + response_deserializer=maps_platform_datasets.ListDatasetVersionsResponse.deserialize, + ) + return self._stubs["list_dataset_versions"] + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + maps_platform_datasets.ListDatasetsResponse, + ]: + r"""Return a callable for the list datasets method over gRPC. + + List all the datasets for the specified project. + + Returns: + Callable[[~.ListDatasetsRequest], + ~.ListDatasetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_datasets" not in self._stubs: + self._stubs["list_datasets"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasets", + request_serializer=maps_platform_datasets.ListDatasetsRequest.serialize, + response_deserializer=maps_platform_datasets.ListDatasetsResponse.deserialize, + ) + return self._stubs["list_datasets"] + + @property + def delete_dataset( + self, + ) -> Callable[[maps_platform_datasets.DeleteDatasetRequest], empty_pb2.Empty]: + r"""Return a callable for the delete dataset method over gRPC. + + Delete the specified dataset and optionally all its + corresponding versions. + + Returns: + Callable[[~.DeleteDatasetRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dataset" not in self._stubs: + self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDataset", + request_serializer=maps_platform_datasets.DeleteDatasetRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dataset"] + + @property + def delete_dataset_version( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetVersionRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete dataset version method over gRPC. + + Delete a specific version of the dataset. + + Returns: + Callable[[~.DeleteDatasetVersionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dataset_version" not in self._stubs: + self._stubs["delete_dataset_version"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDatasetVersion", + request_serializer=maps_platform_datasets.DeleteDatasetVersionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dataset_version"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MapsPlatformDatasetsV1AlphaGrpcTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc_asyncio.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc_asyncio.py new file mode 100644 index 000000000000..efdde2d9737c --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/grpc_asyncio.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets +from google.maps.mapsplatformdatasets_v1alpha.types import dataset + +from .base import DEFAULT_CLIENT_INFO, MapsPlatformDatasetsV1AlphaTransport +from .grpc import MapsPlatformDatasetsV1AlphaGrpcTransport + + +class MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport( + MapsPlatformDatasetsV1AlphaTransport +): + """gRPC AsyncIO backend transport for MapsPlatformDatasetsV1Alpha. + + Service definition for the Maps Platform Datasets API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.CreateDatasetRequest], Awaitable[gmm_dataset.Dataset] + ]: + r"""Return a callable for the create dataset method over gRPC. + + Create a new dataset for the specified project. + + Returns: + Callable[[~.CreateDatasetRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/CreateDataset", + request_serializer=maps_platform_datasets.CreateDatasetRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["create_dataset"] + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], + Awaitable[gmm_dataset.Dataset], + ]: + r"""Return a callable for the update dataset metadata method over gRPC. + + Update the metadata for the dataset. To update the + data use: UploadDataset. + + Returns: + Callable[[~.UpdateDatasetMetadataRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dataset_metadata" not in self._stubs: + self._stubs["update_dataset_metadata"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/UpdateDatasetMetadata", + request_serializer=maps_platform_datasets.UpdateDatasetMetadataRequest.serialize, + response_deserializer=gmm_dataset.Dataset.deserialize, + ) + return self._stubs["update_dataset_metadata"] + + @property + def get_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.GetDatasetRequest], Awaitable[dataset.Dataset] + ]: + r"""Return a callable for the get dataset method over gRPC. + + Get the published or latest version of the dataset. + + Returns: + Callable[[~.GetDatasetRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/GetDataset", + request_serializer=maps_platform_datasets.GetDatasetRequest.serialize, + response_deserializer=dataset.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_dataset_versions( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetVersionsRequest], + Awaitable[maps_platform_datasets.ListDatasetVersionsResponse], + ]: + r"""Return a callable for the list dataset versions method over gRPC. + + List all the versions of a dataset. + + Returns: + Callable[[~.ListDatasetVersionsRequest], + Awaitable[~.ListDatasetVersionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_dataset_versions" not in self._stubs: + self._stubs["list_dataset_versions"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasetVersions", + request_serializer=maps_platform_datasets.ListDatasetVersionsRequest.serialize, + response_deserializer=maps_platform_datasets.ListDatasetVersionsResponse.deserialize, + ) + return self._stubs["list_dataset_versions"] + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + Awaitable[maps_platform_datasets.ListDatasetsResponse], + ]: + r"""Return a callable for the list datasets method over gRPC. + + List all the datasets for the specified project. + + Returns: + Callable[[~.ListDatasetsRequest], + Awaitable[~.ListDatasetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_datasets" not in self._stubs: + self._stubs["list_datasets"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/ListDatasets", + request_serializer=maps_platform_datasets.ListDatasetsRequest.serialize, + response_deserializer=maps_platform_datasets.ListDatasetsResponse.deserialize, + ) + return self._stubs["list_datasets"] + + @property + def delete_dataset( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete dataset method over gRPC. + + Delete the specified dataset and optionally all its + corresponding versions. + + Returns: + Callable[[~.DeleteDatasetRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dataset" not in self._stubs: + self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDataset", + request_serializer=maps_platform_datasets.DeleteDatasetRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dataset"] + + @property + def delete_dataset_version( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetVersionRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete dataset version method over gRPC. + + Delete a specific version of the dataset. + + Returns: + Callable[[~.DeleteDatasetVersionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_dataset_version" not in self._stubs: + self._stubs["delete_dataset_version"] = self.grpc_channel.unary_unary( + "/google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha/DeleteDatasetVersion", + request_serializer=maps_platform_datasets.DeleteDatasetVersionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dataset_version"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/rest.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/rest.py new file mode 100644 index 000000000000..d8120b7c5c0d --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/services/maps_platform_datasets_v1_alpha/transports/rest.py @@ -0,0 +1,1053 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets +from google.maps.mapsplatformdatasets_v1alpha.types import dataset + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MapsPlatformDatasetsV1AlphaTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MapsPlatformDatasetsV1AlphaRestInterceptor: + """Interceptor for MapsPlatformDatasetsV1Alpha. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MapsPlatformDatasetsV1AlphaRestTransport. + + .. code-block:: python + class MyCustomMapsPlatformDatasetsV1AlphaInterceptor(MapsPlatformDatasetsV1AlphaRestInterceptor): + def pre_create_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dataset(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_dataset_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dataset(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_datasets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_datasets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dataset_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dataset_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_dataset_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_dataset_metadata(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MapsPlatformDatasetsV1AlphaRestTransport(interceptor=MyCustomMapsPlatformDatasetsV1AlphaInterceptor()) + client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + + + """ + + def pre_create_dataset( + self, + request: maps_platform_datasets.CreateDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.CreateDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasetsV1Alpha server. + """ + return request, metadata + + def post_create_dataset(self, response: gmm_dataset.Dataset) -> gmm_dataset.Dataset: + """Post-rpc interceptor for create_dataset + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasetsV1Alpha server but before + it is returned to user code. + """ + return response + + def pre_delete_dataset( + self, + request: maps_platform_datasets.DeleteDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.DeleteDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasetsV1Alpha server. 
+ """ + return request, metadata + + def pre_delete_dataset_version( + self, + request: maps_platform_datasets.DeleteDatasetVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + maps_platform_datasets.DeleteDatasetVersionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_dataset_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasetsV1Alpha server. + """ + return request, metadata + + def pre_get_dataset( + self, + request: maps_platform_datasets.GetDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.GetDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasetsV1Alpha server. + """ + return request, metadata + + def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset: + """Post-rpc interceptor for get_dataset + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasetsV1Alpha server but before + it is returned to user code. + """ + return response + + def pre_list_datasets( + self, + request: maps_platform_datasets.ListDatasetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[maps_platform_datasets.ListDatasetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_datasets + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasetsV1Alpha server. + """ + return request, metadata + + def post_list_datasets( + self, response: maps_platform_datasets.ListDatasetsResponse + ) -> maps_platform_datasets.ListDatasetsResponse: + """Post-rpc interceptor for list_datasets + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasetsV1Alpha server but before + it is returned to user code. + """ + return response + + def pre_list_dataset_versions( + self, + request: maps_platform_datasets.ListDatasetVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + maps_platform_datasets.ListDatasetVersionsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_dataset_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasetsV1Alpha server. + """ + return request, metadata + + def post_list_dataset_versions( + self, response: maps_platform_datasets.ListDatasetVersionsResponse + ) -> maps_platform_datasets.ListDatasetVersionsResponse: + """Post-rpc interceptor for list_dataset_versions + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasetsV1Alpha server but before + it is returned to user code. + """ + return response + + def pre_update_dataset_metadata( + self, + request: maps_platform_datasets.UpdateDatasetMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + maps_platform_datasets.UpdateDatasetMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_dataset_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the MapsPlatformDatasetsV1Alpha server. 
+ """ + return request, metadata + + def post_update_dataset_metadata( + self, response: gmm_dataset.Dataset + ) -> gmm_dataset.Dataset: + """Post-rpc interceptor for update_dataset_metadata + + Override in a subclass to manipulate the response + after it is returned by the MapsPlatformDatasetsV1Alpha server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MapsPlatformDatasetsV1AlphaRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MapsPlatformDatasetsV1AlphaRestInterceptor + + +class MapsPlatformDatasetsV1AlphaRestTransport(MapsPlatformDatasetsV1AlphaTransport): + """REST backend transport for MapsPlatformDatasetsV1Alpha. + + Service definition for the Maps Platform Datasets API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "mapsplatformdatasets.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MapsPlatformDatasetsV1AlphaRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
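+ # The remainder of the constructor normalizes the endpoint (adding the url_scheme
+ # prefix only when the host lacks one) before building the authorized session.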
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or MapsPlatformDatasetsV1AlphaRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _CreateDataset(MapsPlatformDatasetsV1AlphaRestStub):
+ def __hash__(self):
+ return hash("CreateDataset")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: maps_platform_datasets.CreateDatasetRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> gmm_dataset.Dataset:
+ r"""Call the create dataset method over HTTP.
+
+ Args:
+ request (~.maps_platform_datasets.CreateDatasetRequest):
+ The request object. Request to create a maps dataset.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.gmm_dataset.Dataset:
+ A representation of a maps platform
+ dataset.
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1alpha/{parent=projects/*}/datasets",
+ "body": "dataset",
+ },
+ ]
+ request, metadata = self._interceptor.pre_create_dataset(request, metadata)
+ pb_request = maps_platform_datasets.CreateDatasetRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
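+ # (from_http_response selects the subclass from the status code, e.g. 404 -> NotFound, 403 -> PermissionDenied.)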
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gmm_dataset.Dataset() + pb_resp = gmm_dataset.Dataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_dataset(resp) + return resp + + class _DeleteDataset(MapsPlatformDatasetsV1AlphaRestStub): + def __hash__(self): + return hash("DeleteDataset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.DeleteDatasetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete dataset method over HTTP. + + Args: + request (~.maps_platform_datasets.DeleteDatasetRequest): + The request object. Request to delete a dataset. + The dataset to be deleted. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/datasets/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_dataset(request, metadata) + pb_request = maps_platform_datasets.DeleteDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDatasetVersion(MapsPlatformDatasetsV1AlphaRestStub): + def __hash__(self): + return hash("DeleteDatasetVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.DeleteDatasetVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete dataset version method over HTTP. + + Args: + request (~.maps_platform_datasets.DeleteDatasetVersionRequest): + The request object. Request to delete a version of a + dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
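Each stub follows the same pipeline: transcode the request against its http_options, JSON-encode any body, back-fill required query params, then dispatch via the authorized session. A small sketch of the transcoding step in isolation, using a plain dict in place of the protobuf request (google.api_core.path_template.transcode accepts keyword-expanded dicts as well as messages); field values here are invented:

from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/v1alpha/{parent=projects/*}/datasets",
        "body": "dataset",
    },
]
# The dict stands in for a CreateDatasetRequest.
transcoded = path_template.transcode(
    http_options, **{"parent": "projects/123", "dataset": {"display_name": "demo"}}
)
assert transcoded["method"] == "post"
assert transcoded["uri"] == "/v1alpha/projects/123/datasets"
assert transcoded["body"] == {"display_name": "demo"}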
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/datasets/*}:deleteVersion", + }, + ] + request, metadata = self._interceptor.pre_delete_dataset_version( + request, metadata + ) + pb_request = maps_platform_datasets.DeleteDatasetVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDataset(MapsPlatformDatasetsV1AlphaRestStub): + def __hash__(self): + return hash("GetDataset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.GetDatasetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Call the get dataset method over HTTP. + + Args: + request (~.maps_platform_datasets.GetDatasetRequest): + The request object. Request to get the specified dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.Dataset: + A representation of a maps platform + dataset. 
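Every stub also carries a __REQUIRED_FIELDS_DEFAULT_VALUES table (empty for this service) and re-inserts those defaults into the query string, because MessageToJson drops fields still at their proto default. The pattern in isolation, with a hypothetical required field:

# "readMask" and its default are invented for illustration; this service's
# table is empty.
REQUIRED_FIELDS_DEFAULT_VALUES = {"readMask": "name"}


def get_unset_required_fields(message_dict):
    return {
        k: v
        for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
        if k not in message_dict
    }


query_params = {"pageSize": 10}
query_params.update(get_unset_required_fields(query_params))
assert query_params == {"pageSize": 10, "readMask": "name"}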
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/datasets/*}", + }, + ] + request, metadata = self._interceptor.pre_get_dataset(request, metadata) + pb_request = maps_platform_datasets.GetDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dataset.Dataset() + pb_resp = dataset.Dataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dataset(resp) + return resp + + class _ListDatasets(MapsPlatformDatasetsV1AlphaRestStub): + def __hash__(self): + return hash("ListDatasets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.ListDatasetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> maps_platform_datasets.ListDatasetsResponse: + r"""Call the list datasets method over HTTP. + + Args: + request (~.maps_platform_datasets.ListDatasetsRequest): + The request object. Request to list datasets for the + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.maps_platform_datasets.ListDatasetsResponse: + Response to list datasets for the + project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*}/datasets", + }, + ] + request, metadata = self._interceptor.pre_list_datasets(request, metadata) + pb_request = maps_platform_datasets.ListDatasetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = maps_platform_datasets.ListDatasetsResponse() + pb_resp = maps_platform_datasets.ListDatasetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_datasets(resp) + return resp + + class _ListDatasetVersions(MapsPlatformDatasetsV1AlphaRestStub): + def __hash__(self): + return hash("ListDatasetVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.ListDatasetVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> maps_platform_datasets.ListDatasetVersionsResponse: + r"""Call the list dataset versions method over HTTP. + + Args: + request (~.maps_platform_datasets.ListDatasetVersionsRequest): + The request object. Request to list of all versions of + the dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.maps_platform_datasets.ListDatasetVersionsResponse: + Response with list of all versions of + the dataset. 
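Deserialization is likewise uniform across stubs: the raw JSON payload is parsed into the wrapped protobuf of a fresh proto-plus message. A sketch of that pattern with an invented payload:

from google.protobuf import json_format

from google.maps.mapsplatformdatasets_v1alpha.types import Dataset

resp = Dataset()
pb_resp = Dataset.pb(resp)  # the underlying protobuf, mutated in place
json_format.Parse(
    '{"name": "projects/123/datasets/abc"}', pb_resp, ignore_unknown_fields=True
)
assert resp.name == "projects/123/datasets/abc"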
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/datasets/*}:listVersions", + }, + ] + request, metadata = self._interceptor.pre_list_dataset_versions( + request, metadata + ) + pb_request = maps_platform_datasets.ListDatasetVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = maps_platform_datasets.ListDatasetVersionsResponse() + pb_resp = maps_platform_datasets.ListDatasetVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_dataset_versions(resp) + return resp + + class _UpdateDatasetMetadata(MapsPlatformDatasetsV1AlphaRestStub): + def __hash__(self): + return hash("UpdateDatasetMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: maps_platform_datasets.UpdateDatasetMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gmm_dataset.Dataset: + r"""Call the update dataset metadata method over HTTP. + + Args: + request (~.maps_platform_datasets.UpdateDatasetMetadataRequest): + The request object. Request to update the metadata fields + of the dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gmm_dataset.Dataset: + A representation of a maps platform + dataset. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{dataset.name=projects/*/datasets/*}", + "body": "dataset", + }, + ] + request, metadata = self._interceptor.pre_update_dataset_metadata( + request, metadata + ) + pb_request = maps_platform_datasets.UpdateDatasetMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gmm_dataset.Dataset() + pb_resp = gmm_dataset.Dataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_dataset_metadata(resp) + return resp + + @property + def create_dataset( + self, + ) -> Callable[[maps_platform_datasets.CreateDatasetRequest], gmm_dataset.Dataset]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dataset( + self, + ) -> Callable[[maps_platform_datasets.DeleteDatasetRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dataset_version( + self, + ) -> Callable[ + [maps_platform_datasets.DeleteDatasetVersionRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDatasetVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dataset( + self, + ) -> Callable[[maps_platform_datasets.GetDatasetRequest], dataset.Dataset]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_datasets( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetsRequest], + maps_platform_datasets.ListDatasetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDatasets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dataset_versions( + self, + ) -> Callable[ + [maps_platform_datasets.ListDatasetVersionsRequest], + maps_platform_datasets.ListDatasetVersionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatasetVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_dataset_metadata( + self, + ) -> Callable[ + [maps_platform_datasets.UpdateDatasetMetadataRequest], gmm_dataset.Dataset + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatasetMetadata(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MapsPlatformDatasetsV1AlphaRestTransport",) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/__init__.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/__init__.py new file mode 100644 index 000000000000..b8711c93fcdc --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .data_source import FileFormat, GcsSource, LocalFileSource +from .dataset import Dataset, State, Usage +from .maps_platform_datasets import ( + CreateDatasetRequest, + DeleteDatasetRequest, + DeleteDatasetVersionRequest, + GetDatasetRequest, + ListDatasetsRequest, + ListDatasetsResponse, + ListDatasetVersionsRequest, + ListDatasetVersionsResponse, + UpdateDatasetMetadataRequest, +) + +__all__ = ( + "GcsSource", + "LocalFileSource", + "FileFormat", + "Dataset", + "State", + "Usage", + "CreateDatasetRequest", + "DeleteDatasetRequest", + "DeleteDatasetVersionRequest", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListDatasetVersionsRequest", + "ListDatasetVersionsResponse", + "UpdateDatasetMetadataRequest", +) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/data_source.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/data_source.py new file mode 100644 index 000000000000..f92d79ccd5c5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/data_source.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
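Because types/__init__.py above re-exports every message and enum, callers can import them from one place rather than from the individual modules; a brief sketch (field values invented):

from google.maps.mapsplatformdatasets_v1alpha.types import (
    CreateDatasetRequest,
    Dataset,
)

request = CreateDatasetRequest(
    parent="projects/123",
    dataset=Dataset(display_name="demo"),
)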
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1alpha", + manifest={ + "FileFormat", + "LocalFileSource", + "GcsSource", + }, +) + + +class FileFormat(proto.Enum): + r"""The format of the file being uploaded. + + Values: + FILE_FORMAT_UNSPECIFIED (0): + Unspecified file format. + FILE_FORMAT_GEOJSON (1): + GeoJson file. + FILE_FORMAT_KML (2): + KML file. + FILE_FORMAT_CSV (3): + CSV file. + FILE_FORMAT_PROTO (4): + Protobuf file. + FILE_FORMAT_KMZ (5): + KMZ file. + """ + FILE_FORMAT_UNSPECIFIED = 0 + FILE_FORMAT_GEOJSON = 1 + FILE_FORMAT_KML = 2 + FILE_FORMAT_CSV = 3 + FILE_FORMAT_PROTO = 4 + FILE_FORMAT_KMZ = 5 + + +class LocalFileSource(proto.Message): + r"""The details about the data source when it is a local file. + + Attributes: + filename (str): + The file name and extension of the uploaded + file. + file_format (google.maps.mapsplatformdatasets_v1alpha.types.FileFormat): + The format of the file that is being + uploaded. + """ + + filename: str = proto.Field( + proto.STRING, + number=1, + ) + file_format: "FileFormat" = proto.Field( + proto.ENUM, + number=2, + enum="FileFormat", + ) + + +class GcsSource(proto.Message): + r"""The details about the data source when it is in Google Cloud + Storage. + + Attributes: + input_uri (str): + Source data URI. For example, ``gs://my_bucket/my_object``. + file_format (google.maps.mapsplatformdatasets_v1alpha.types.FileFormat): + The file format of the Google Cloud Storage + object. This is used mainly for validation. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_format: "FileFormat" = proto.Field( + proto.ENUM, + number=2, + enum="FileFormat", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/dataset.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/dataset.py new file mode 100644 index 000000000000..ac19c9eae15c --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/dataset.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
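A short sketch constructing the two data-source shapes defined above (values invented):

from google.maps.mapsplatformdatasets_v1alpha.types import (
    FileFormat,
    GcsSource,
    LocalFileSource,
)

gcs = GcsSource(
    input_uri="gs://my_bucket/my_object",
    file_format=FileFormat.FILE_FORMAT_GEOJSON,
)
local = LocalFileSource(
    filename="boundaries.csv",
    file_format=FileFormat.FILE_FORMAT_CSV,
)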
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha.types import data_source + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1alpha", + manifest={ + "Usage", + "State", + "Dataset", + }, +) + + +class Usage(proto.Enum): + r"""Usage specifies where the data is intended to be used to + inform how to process the data. + + Values: + USAGE_UNSPECIFIED (0): + The usage of this dataset is not set. + USAGE_DATA_DRIVEN_STYLING (1): + This dataset will be used for data driven + styling. + USAGE_AREA_AFFORDANCES (2): + This dataset will be used for area + affordances in routing. + USAGE_ASSISTED_DRIVING (3): + This dataset will be used for assisted + driving in routing. + """ + USAGE_UNSPECIFIED = 0 + USAGE_DATA_DRIVEN_STYLING = 1 + USAGE_AREA_AFFORDANCES = 2 + USAGE_ASSISTED_DRIVING = 3 + + +class State(proto.Enum): + r"""State specifies the status of the import of the latest + dataset version. + + Values: + STATE_UNSPECIFIED (0): + The state of this dataset is not set. + STATE_IMPORTING (1): + The dataset version is getting imported. + STATE_IMPORT_SUCCEEDED (2): + The dataset version succeeded in getting + imported. + STATE_IMPORT_FAILED (3): + The dataset version failed to get imported. + """ + STATE_UNSPECIFIED = 0 + STATE_IMPORTING = 1 + STATE_IMPORT_SUCCEEDED = 2 + STATE_IMPORT_FAILED = 3 + + +class Dataset(proto.Message): + r"""A representation of a maps platform dataset. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Resource name, projects/{project}/datasets/{dataset_id} + display_name (str): + Human readable name, shown in the console UI. + Set by customer. + description (str): + A description of this dataset; set by the + customer. + version_id (str): + The version of the dataset. + usage (MutableSequence[google.maps.mapsplatformdatasets_v1alpha.types.Usage]): + Specified use case(s) for this dataset. + local_file_source (google.maps.mapsplatformdatasets_v1alpha.types.LocalFileSource): + A local file source for the dataset for a + single upload. + + This field is a member of `oneof`_ ``data_source``. + gcs_source (google.maps.mapsplatformdatasets_v1alpha.types.GcsSource): + A Google Cloud Storage file source for the + dataset for a single upload. + + This field is a member of `oneof`_ ``data_source``. + status (google.maps.mapsplatformdatasets_v1alpha.types.State): + The status of the import of the latest + dataset version. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the dataset was first + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the dataset metadata + was last updated. + version_create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this version of + dataset was created. (It happened when importing + data to the dataset) + version_description (str): + Output only. The description for this version + of dataset. It is provided when importing data + to the dataset. 
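proto.Enum derives from enum.IntEnum, which is what lets the REST layer serialize these values with enum-encoding=int; for example:

from google.maps.mapsplatformdatasets_v1alpha.types import State, Usage

assert int(Usage.USAGE_DATA_DRIVEN_STYLING) == 1
assert State(2) is State.STATE_IMPORT_SUCCEEDED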
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + version_id: str = proto.Field( + proto.STRING, + number=4, + ) + usage: MutableSequence["Usage"] = proto.RepeatedField( + proto.ENUM, + number=5, + enum="Usage", + ) + local_file_source: data_source.LocalFileSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="data_source", + message=data_source.LocalFileSource, + ) + gcs_source: data_source.GcsSource = proto.Field( + proto.MESSAGE, + number=7, + oneof="data_source", + message=data_source.GcsSource, + ) + status: "State" = proto.Field( + proto.ENUM, + number=12, + enum="State", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + version_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + version_description: str = proto.Field( + proto.STRING, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets.py new file mode 100644 index 000000000000..bd7556779636 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1alpha", + manifest={ + "CreateDatasetRequest", + "UpdateDatasetMetadataRequest", + "GetDatasetRequest", + "ListDatasetVersionsRequest", + "ListDatasetVersionsResponse", + "ListDatasetsRequest", + "ListDatasetsResponse", + "DeleteDatasetRequest", + "DeleteDatasetVersionRequest", + }, +) + + +class CreateDatasetRequest(proto.Message): + r"""Request to create a maps dataset. + + Attributes: + parent (str): + Required. Parent project that will own the dataset. Format: + projects/{$project_number} + dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): + Required. The dataset version to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + dataset: gmm_dataset.Dataset = proto.Field( + proto.MESSAGE, + number=2, + message=gmm_dataset.Dataset, + ) + + +class UpdateDatasetMetadataRequest(proto.Message): + r"""Request to update the metadata fields of the dataset. + + Attributes: + dataset (google.maps.mapsplatformdatasets_v1alpha.types.Dataset): + Required. The dataset to update. The dataset's name is used + to identify the dataset to be updated. The name has the + format: projects/{project}/datasets/{dataset_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. Support the value "*" for + full replacement. + """ + + dataset: gmm_dataset.Dataset = proto.Field( + proto.MESSAGE, + number=1, + message=gmm_dataset.Dataset, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetDatasetRequest(proto.Message): + r"""Request to get the specified dataset. + + Attributes: + name (str): + Required. Resource name. Can also fetch a specified version + projects/{project}/datasets/{dataset_id} + projects/{project}/datasets/{dataset_id}@{version-id} + + In order to retrieve a previous version of the dataset, also + provide the version ID. Example: + projects/123/datasets/assisted-driving-preferences@c7cfa2a8 + published_usage (google.maps.mapsplatformdatasets_v1alpha.types.Usage): + If specified, will fetch the dataset details + of the version published for the specified use + case rather than the latest, if one exists. If a + published version does not exist, will default + to getting the dataset details of the latest + version. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + published_usage: gmm_dataset.Usage = proto.Field( + proto.ENUM, + number=2, + enum=gmm_dataset.Usage, + ) + + +class ListDatasetVersionsRequest(proto.Message): + r"""Request to list of all versions of the dataset. + + Attributes: + name (str): + Required. The name of the dataset to list all + the versions for. + page_size (int): + The maximum number of versions to return per + page. If unspecified (or zero), at most 1000 + versions will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + The page token, received from a previous + GetDatasetVersions call. Provide this to + retrieve the subsequent page. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDatasetVersionsResponse(proto.Message): + r"""Response with list of all versions of the dataset. + + Attributes: + datasets (MutableSequence[google.maps.mapsplatformdatasets_v1alpha.types.Dataset]): + All the versions of the dataset. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + datasets: MutableSequence[gmm_dataset.Dataset] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gmm_dataset.Dataset, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDatasetsRequest(proto.Message): + r"""Request to list datasets for the project. + + Attributes: + parent (str): + Required. The name of the project to list all + the datasets for. 
+ page_size (int): + The maximum number of versions to return per + page. If unspecified (or zero), at most 1000 + datasets will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + The page token, received from a previous + GetDatasetVersions call. Provide this to + retrieve the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDatasetsResponse(proto.Message): + r"""Response to list datasets for the project. + + Attributes: + datasets (MutableSequence[google.maps.mapsplatformdatasets_v1alpha.types.Dataset]): + All the datasets for the project. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + datasets: MutableSequence[gmm_dataset.Dataset] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gmm_dataset.Dataset, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDatasetRequest(proto.Message): + r"""Request to delete a dataset. + The dataset to be deleted. + + Attributes: + name (str): + Required. Format: projects/${project}/datasets/{dataset_id} + force (bool): + If set to true, any dataset version for this + dataset will also be deleted. (Otherwise, the + request will only work if the dataset has no + versions.) + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class DeleteDatasetVersionRequest(proto.Message): + r"""Request to delete a version of a dataset. + + Attributes: + name (str): + Required. Format: + projects/${project}/datasets/{dataset_id}@{version-id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets_alpha_service.py b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets_alpha_service.py new file mode 100644 index 000000000000..27d4cc38deaf --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/google/maps/mapsplatformdatasets_v1alpha/types/maps_platform_datasets_alpha_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
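The page_token/next_page_token pair supports manual pagination; the generated client normally wraps this in a pager, but the raw loop looks like the sketch below, where `client` is an assumed, already-constructed client object:

from google.maps.mapsplatformdatasets_v1alpha.types import ListDatasetsRequest

page_token = ""
while True:
    response = client.list_datasets(
        ListDatasetsRequest(parent="projects/123", page_size=100, page_token=page_token)
    )
    for ds in response.datasets:
        print(ds.name)
    page_token = response.next_page_token
    if not page_token:  # an omitted token means there are no further pages
        break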
+# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.mapsplatformdatasets.v1alpha", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-mapsplatformdatasets/mypy.ini b/packages/google-maps-mapsplatformdatasets/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-maps-mapsplatformdatasets/noxfile.py b/packages/google-maps-mapsplatformdatasets/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-maps-mapsplatformdatasets/renovate.json b/packages/google-maps-mapsplatformdatasets/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_async.py new file mode 100644 index 000000000000..4904e49cf7a3 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
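The constraints-parsing step in the prerelease_deps session above relies on a lookahead regex that keeps only "=="-pinned entries; its behavior in isolation, on invented constraints text:

import re

constraints_text = """\
# comment line
google-api-core==1.34.0
  proto-plus==1.22.0
grpcio>=1.33.2
"""
deps = [
    m.group(1)
    for m in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]
assert deps == ["google-api-core", "proto-plus"]  # ">=" pins are skipped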
+# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_CreateDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +async def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_CreateDataset_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_sync.py new file mode 100644 index 000000000000..5eaf3438a95c --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_CreateDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
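The async variant above only defines a coroutine; to actually execute it, a caller would need an event loop, for example:

import asyncio

# sample_create_dataset is the coroutine defined in the async snippet above.
asyncio.run(sample_create_dataset())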
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_CreateDataset_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_async.py new file mode 100644 index 000000000000..07ce84fd8d8b --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_DeleteDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +async def sample_delete_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + await client.delete_dataset(request=request) + + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_DeleteDataset_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_sync.py new file mode 100644 index 000000000000..11be1951e6bb --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_DeleteDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
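+#   (For example, "name_value" in the request below is only a placeholder for
+#   the full resource name of the dataset to delete, e.g.
+#   name="projects/123/datasets/456", an assumed, illustrative format.)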
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +def sample_delete_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + client.delete_dataset(request=request) + + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_DeleteDataset_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_async.py new file mode 100644 index 000000000000..7e9a9bf36759 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_GetDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +async def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_GetDataset_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_sync.py new file mode 100644 index 000000000000..0f631280fedb --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_GetDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_GetDataset_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_async.py new file mode 100644 index 000000000000..c46af55e3acf --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_ListDatasets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.maps import mapsplatformdatasets_v1
+
+
+async def sample_list_datasets():
+    # Create a client
+    client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient()
+
+    # Initialize request argument(s)
+    request = mapsplatformdatasets_v1.ListDatasetsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_datasets(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_ListDatasets_async]
diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_sync.py
new file mode 100644
index 000000000000..674cd5609f26
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatasets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-maps-mapsplatformdatasets
+
+
+# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_ListDatasets_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
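+#   (For example, beyond the required parent, a ListDatasetsRequest typically
+#   also accepts optional page_size/page_token fields, assumed here from the
+#   standard list-method pattern; the pager below fetches pages lazily.)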
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +def sample_list_datasets(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_ListDatasets_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_async.py new file mode 100644 index 000000000000..332873237959 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatasetMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_UpdateDatasetMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +async def sample_update_dataset_metadata(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.UpdateDatasetMetadataRequest( + ) + + # Make the request + response = await client.update_dataset_metadata(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_UpdateDatasetMetadata_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_sync.py new file mode 100644 index 000000000000..353b9cbf3567 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatasetMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1_generated_MapsPlatformDatasets_UpdateDatasetMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
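+#   (For example, the empty UpdateDatasetMetadataRequest() below is unlikely
+#   to succeed as-is; the request presumably needs at least its dataset field,
+#   and optionally an update_mask, populated before the call is made.)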
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1 + + +def sample_update_dataset_metadata(): + # Create a client + client = mapsplatformdatasets_v1.MapsPlatformDatasetsClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1.UpdateDatasetMetadataRequest( + ) + + # Make the request + response = client.update_dataset_metadata(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1_generated_MapsPlatformDatasets_UpdateDatasetMetadata_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py new file mode 100644 index 000000000000..cad0baf7d20e --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +async def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py new file mode 100644 index 000000000000..c73204c37e0e --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +def sample_create_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.CreateDatasetRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py new file mode 100644 index 000000000000..7a10ea527d6e --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +async def sample_delete_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + await client.delete_dataset(request=request) + + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py new file mode 100644 index 000000000000..51daa4e108ed --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +def sample_delete_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetRequest( + name="name_value", + ) + + # Make the request + client.delete_dataset(request=request) + + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py new file mode 100644 index 000000000000..3e2111b38f8e --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatasetVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
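+#   (For example, "name_value" in the request below stands for the resource
+#   name of a single dataset version; the exact format is not shown here, so
+#   consult the DeleteDatasetVersionRequest reference before filling it in.)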
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +async def sample_delete_dataset_version(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( + name="name_value", + ) + + # Make the request + await client.delete_dataset_version(request=request) + + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py new file mode 100644 index 000000000000..e009a3af0ee3 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDatasetVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +def sample_delete_dataset_version(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.DeleteDatasetVersionRequest( + name="name_value", + ) + + # Make the request + client.delete_dataset_version(request=request) + + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py new file mode 100644 index 000000000000..8ee04b1ec888 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +async def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py new file mode 100644 index 000000000000..2cd62e2d9e92 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +def sample_get_dataset(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.GetDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_dataset(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py new file mode 100644 index 000000000000..d87990cd85a5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatasetVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
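+#   (Note, for example, that unlike the other list methods in this package
+#   this request is keyed by name, the dataset whose versions are listed,
+#   rather than by parent, as the request initialization below shows.)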
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.maps import mapsplatformdatasets_v1alpha
+
+
+async def sample_list_dataset_versions():
+    # Create a client
+    client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient()
+
+    # Initialize request argument(s)
+    request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    page_result = await client.list_dataset_versions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_async]
diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py
new file mode 100644
index 000000000000..423cb63c5f87
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatasetVersions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-maps-mapsplatformdatasets
+
+
+# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +def sample_list_dataset_versions(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.ListDatasetVersionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_dataset_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py new file mode 100644 index 000000000000..d45f997077b7 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.maps import mapsplatformdatasets_v1alpha
+
+
+async def sample_list_datasets():
+    # Create a client
+    client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient()
+
+    # Initialize request argument(s)
+    request = mapsplatformdatasets_v1alpha.ListDatasetsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_datasets(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_async]
diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py
new file mode 100644
index 000000000000..746cfc4674de
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatasets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-maps-mapsplatformdatasets
+
+
+# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +def sample_list_datasets(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.ListDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_sync] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py new file mode 100644 index 000000000000..e3a8deadaa5d --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatasetMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import mapsplatformdatasets_v1alpha + + +async def sample_update_dataset_metadata(): + # Create a client + client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient() + + # Initialize request argument(s) + request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest( + ) + + # Make the request + response = await client.update_dataset_metadata(request=request) + + # Handle the response + print(response) + +# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_async] diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py new file mode 100644 index 000000000000..c45c69d1c9ef --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatasetMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-mapsplatformdatasets + + +# [START mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.maps import mapsplatformdatasets_v1alpha
+
+
+def sample_update_dataset_metadata():
+    # Create a client
+    client = mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient()
+
+    # Initialize request argument(s)
+    request = mapsplatformdatasets_v1alpha.UpdateDatasetMetadataRequest(
+    )
+
+    # Make the request
+    response = client.update_dataset_metadata(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_sync]
diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json
new file mode 100644
index 000000000000..a64c4e935462
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json
@@ -0,0 +1,830 @@
+{
+  "clientLibrary": {
+    "apis": [
+      {
+        "id": "google.maps.mapsplatformdatasets.v1",
+        "version": "v1"
+      }
+    ],
+    "language": "PYTHON",
+    "name": "google-maps-mapsplatformdatasets",
+    "version": "0.1.0"
+  },
+  "snippets": [
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient",
+          "shortName": "MapsPlatformDatasetsAsyncClient"
+        },
+        "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient.create_dataset",
+        "method": {
+          "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.CreateDataset",
+          "service": {
+            "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets",
+            "shortName": "MapsPlatformDatasets"
+          },
+          "shortName": "CreateDataset"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.maps.mapsplatformdatasets_v1.types.CreateDatasetRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "dataset",
+            "type": "google.maps.mapsplatformdatasets_v1.types.Dataset"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.maps.mapsplatformdatasets_v1.types.Dataset",
+        "shortName": "create_dataset"
+      },
+      "description": "Sample for CreateDataset",
+      "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_CreateDataset_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient",
+          "shortName": "MapsPlatformDatasetsClient"
+        },
+        "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient.create_dataset",
+        "method": {
+          "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.CreateDataset",
+          "service": {
+            "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets",
+            "shortName": "MapsPlatformDatasets"
+          },
+          "shortName": "CreateDataset"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.maps.mapsplatformdatasets_v1.types.CreateDatasetRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "dataset",
+            "type": "google.maps.mapsplatformdatasets_v1.types.Dataset"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.maps.mapsplatformdatasets_v1.types.Dataset",
+        "shortName": "create_dataset"
+      },
+      "description": "Sample for CreateDataset",
+      "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_CreateDataset_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_create_dataset_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient",
+          "shortName": "MapsPlatformDatasetsAsyncClient"
+        },
+        "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient.delete_dataset",
+        "method": {
+          "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.DeleteDataset",
+          "service": {
+            "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets",
+            "shortName": "MapsPlatformDatasets"
+          },
+          "shortName": "DeleteDataset"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.maps.mapsplatformdatasets_v1.types.DeleteDatasetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_dataset"
+      },
+      "description": "Sample for DeleteDataset",
+      "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_DeleteDataset_async",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient",
+          "shortName": "MapsPlatformDatasetsClient"
+        },
+        "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient.delete_dataset",
+        "method": {
+          "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.DeleteDataset",
+          "service": {
+            "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets",
+            "shortName": "MapsPlatformDatasets"
+          },
+          "shortName": "DeleteDataset"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.maps.mapsplatformdatasets_v1.types.DeleteDatasetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_dataset"
+      },
+      "description": "Sample for DeleteDataset",
+      "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_DeleteDataset_sync",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_delete_dataset_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient",
+          "shortName": "MapsPlatformDatasetsAsyncClient"
+        },
+        "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient.get_dataset",
+        "method": {
+          "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.GetDataset",
+          "service": {
+            "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets",
+            "shortName": "MapsPlatformDatasets"
+          },
+          "shortName": "GetDataset"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.maps.mapsplatformdatasets_v1.types.GetDatasetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.maps.mapsplatformdatasets_v1.types.Dataset",
+        "shortName": "get_dataset"
+      },
+      "description": "Sample for GetDataset",
+      "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_GetDataset_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName":
"google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient", + "shortName": "MapsPlatformDatasetsClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient.get_dataset", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.GetDataset", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets", + "shortName": "MapsPlatformDatasets" + }, + "shortName": "GetDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1.types.GetDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1.types.Dataset", + "shortName": "get_dataset" + }, + "description": "Sample for GetDataset", + "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_GetDataset_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_get_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient", + "shortName": "MapsPlatformDatasetsAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient.list_datasets", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.ListDatasets", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets", + "shortName": "MapsPlatformDatasets" + }, + "shortName": "ListDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1.types.ListDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.pagers.ListDatasetsAsyncPager", + "shortName": "list_datasets" + }, + "description": "Sample for ListDatasets", + "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_ListDatasets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient", + "shortName": "MapsPlatformDatasetsClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient.list_datasets", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.ListDatasets", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets", + "shortName": "MapsPlatformDatasets" + }, + "shortName": "ListDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1.types.ListDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.pagers.ListDatasetsPager", + "shortName": "list_datasets" + }, + "description": "Sample for ListDatasets", + "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_ListDatasets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_list_datasets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient", + "shortName": "MapsPlatformDatasetsAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsAsyncClient.update_dataset_metadata", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.UpdateDatasetMetadata", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets", + "shortName": "MapsPlatformDatasets" + }, + "shortName": "UpdateDatasetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1.types.UpdateDatasetMetadataRequest" + }, + { + "name": "dataset", + "type": "google.maps.mapsplatformdatasets_v1.types.Dataset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1.types.Dataset", + "shortName": "update_dataset_metadata" + }, + "description": "Sample for UpdateDatasetMetadata", + "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_UpdateDatasetMetadata_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient", + "shortName": "MapsPlatformDatasetsClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1.MapsPlatformDatasetsClient.update_dataset_metadata", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets.UpdateDatasetMetadata", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1.MapsPlatformDatasets", + "shortName": "MapsPlatformDatasets" + }, + "shortName": "UpdateDatasetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1.types.UpdateDatasetMetadataRequest" + }, + { + "name": "dataset", + "type": "google.maps.mapsplatformdatasets_v1.types.Dataset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1.types.Dataset", + "shortName": "update_dataset_metadata" + }, + "description": "Sample for UpdateDatasetMetadata", + "file": "mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1_generated_MapsPlatformDatasets_UpdateDatasetMetadata_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1_generated_maps_platform_datasets_update_dataset_metadata_sync.py" + } + ] +} diff --git a/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json new file mode 100644 index 000000000000..daa654ea3a20 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json @@ -0,0 +1,1146 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.maps.mapsplatformdatasets.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-maps-mapsplatformdatasets", + "version": "0.3.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", + "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.create_dataset", + "method": { + "fullName": 
"google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.CreateDataset", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "CreateDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "dataset", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", + "shortName": "create_dataset" + }, + "description": "Sample for CreateDataset", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", + "shortName": "MapsPlatformDatasetsV1AlphaClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.create_dataset", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.CreateDataset", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "CreateDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.CreateDatasetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "dataset", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", + "shortName": "create_dataset" + }, + "description": "Sample for CreateDataset", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_CreateDataset_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_create_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", + "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.delete_dataset_version", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDatasetVersion", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "DeleteDatasetVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dataset_version" + }, + "description": "Sample for DeleteDatasetVersion", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", + "shortName": "MapsPlatformDatasetsV1AlphaClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.delete_dataset_version", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDatasetVersion", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "DeleteDatasetVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dataset_version" + }, + "description": "Sample for DeleteDatasetVersion", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDatasetVersion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" 
+ }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", + "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.delete_dataset", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDataset", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "DeleteDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dataset" + }, + "description": "Sample for DeleteDataset", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", + "shortName": "MapsPlatformDatasetsV1AlphaClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.delete_dataset", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.DeleteDataset", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "DeleteDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.DeleteDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_dataset" + }, + "description": "Sample for DeleteDataset", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_DeleteDataset_sync", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_delete_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", + "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.get_dataset", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.GetDataset", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "GetDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", + "shortName": "get_dataset" + }, + "description": "Sample for GetDataset", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", + "shortName": "MapsPlatformDatasetsV1AlphaClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.get_dataset", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.GetDataset", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "GetDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.GetDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", + "shortName": "get_dataset" + }, + "description": "Sample for GetDataset", + "file": 
"mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_GetDataset_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_get_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", + "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.list_dataset_versions", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasetVersions", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "ListDatasetVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsAsyncPager", + "shortName": "list_dataset_versions" + }, + "description": "Sample for ListDatasetVersions", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", + "shortName": "MapsPlatformDatasetsV1AlphaClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.list_dataset_versions", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasetVersions", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "ListDatasetVersions" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetVersionsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetVersionsPager", + "shortName": "list_dataset_versions" + }, + "description": "Sample for ListDatasetVersions", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasetVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_dataset_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", + "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.list_datasets", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasets", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "ListDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsAsyncPager", + "shortName": "list_datasets" + }, + "description": "Sample for ListDatasets", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", + "shortName": "MapsPlatformDatasetsV1AlphaClient" + }, + 
"fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.list_datasets", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.ListDatasets", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "ListDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.ListDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.pagers.ListDatasetsPager", + "shortName": "list_datasets" + }, + "description": "Sample for ListDatasets", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_ListDatasets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_list_datasets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient", + "shortName": "MapsPlatformDatasetsV1AlphaAsyncClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaAsyncClient.update_dataset_metadata", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.UpdateDatasetMetadata", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "UpdateDatasetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest" + }, + { + "name": "dataset", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", + "shortName": "update_dataset_metadata" + }, + "description": "Sample for UpdateDatasetMetadata", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient", + "shortName": "MapsPlatformDatasetsV1AlphaClient" + }, + "fullName": "google.maps.mapsplatformdatasets_v1alpha.MapsPlatformDatasetsV1AlphaClient.update_dataset_metadata", + "method": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha.UpdateDatasetMetadata", + "service": { + "fullName": "google.maps.mapsplatformdatasets.v1alpha.MapsPlatformDatasetsV1Alpha", + "shortName": "MapsPlatformDatasetsV1Alpha" + }, + "shortName": "UpdateDatasetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.UpdateDatasetMetadataRequest" + }, + { + "name": "dataset", + "type": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.maps.mapsplatformdatasets_v1alpha.types.Dataset", + "shortName": "update_dataset_metadata" + }, + "description": "Sample for UpdateDatasetMetadata", + "file": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "mapsplatformdatasets_v1alpha_generated_MapsPlatformDatasetsV1Alpha_UpdateDatasetMetadata_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "mapsplatformdatasets_v1alpha_generated_maps_platform_datasets_v1_alpha_update_dataset_metadata_sync.py" + } + ] +} diff --git a/packages/google-maps-mapsplatformdatasets/scripts/decrypt-secrets.sh b/packages/google-maps-mapsplatformdatasets/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. 
+cd "$ROOT"
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they should prepare these files themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+    --project="${PROJECT_ID}" \
+    > testing/test-env.sh
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-service-account" \
+    --project="${PROJECT_ID}" \
+    > testing/service-account.json
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-client-secrets" \
+    --project="${PROJECT_ID}" \
+    > testing/client-secrets.json
diff --git a/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1_keywords.py b/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1_keywords.py
new file mode 100644
index 000000000000..1c3ae434d137
--- /dev/null
+++ b/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1_keywords.py
@@ -0,0 +1,180 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class mapsplatformdatasetsCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+        'create_dataset': ('parent', 'dataset', ),
+        'delete_dataset': ('name', ),
+        'get_dataset': ('name', ),
+        'list_datasets': ('parent', 'page_size', 'page_token', ),
+        'update_dataset_metadata': ('dataset', 'update_mask', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
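+            # Illustrative sketch of the rewrite performed below for files
+            # that still need fixing (values are hypothetical): a flattened
+            # call such as
+            #     client.create_dataset("projects/my-proj", my_dataset)
+            # becomes the request-object form
+            #     client.create_dataset(request={'parent': "projects/my-proj", 'dataset': my_dataset})
+            # per the METHOD_TO_PARAMS table above, while retry/timeout/metadata
+            # remain ordinary keyword arguments.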
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=mapsplatformdatasetsCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the mapsplatformdatasets client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates on a best-effort basis when converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool can also produce false
+      positives when an API method shares a name with another method.
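+
+Example invocation (a sketch; directory names are placeholders):
+
+    python3 fixup_mapsplatformdatasets_v1_keywords.py \
+        --input-directory ./src --output-directory ./src_fixed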
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1alpha_keywords.py b/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1alpha_keywords.py new file mode 100644 index 000000000000..46da7dd9d65a --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/scripts/fixup_mapsplatformdatasets_v1alpha_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class mapsplatformdatasetsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_dataset': ('parent', 'dataset', ), + 'delete_dataset': ('name', 'force', ), + 'delete_dataset_version': ('name', ), + 'get_dataset': ('name', 'published_usage', ), + 'list_datasets': ('parent', 'page_size', 'page_token', ), + 'list_dataset_versions': ('name', 'page_size', 'page_token', ), + 'update_dataset_metadata': ('dataset', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=mapsplatformdatasetsCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the mapsplatformdatasets client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates on a best-effort basis when converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool can also produce false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-maps-mapsplatformdatasets/setup.py b/packages/google-maps-mapsplatformdatasets/setup.py new file mode 100644 index 000000000000..5d3f55acfc55 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-maps-mapsplatformdatasets" + + +description = "Google Maps Mapsplatformdatasets API client library" + +version = {} +with open( + os.path.join(package_root, "google/maps/mapsplatformdatasets/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.maps"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-maps-mapsplatformdatasets/testing/.gitignore b/packages/google-maps-mapsplatformdatasets/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-maps-mapsplatformdatasets/testing/constraints-3.10.txt b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-mapsplatformdatasets/testing/constraints-3.11.txt b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
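The release-status branch in the setup.py above follows a simple rule: any 0.x version is published as Beta, anything else as Production/Stable. Restated as a tiny illustrative loop (version strings are arbitrary):

```python
# Sketch of the release-status rule from the setup.py above.
for v in ("0.3.0", "1.0.1"):
    status = (
        "Development Status :: 4 - Beta"
        if v[0] == "0"
        else "Development Status :: 5 - Production/Stable"
    )
    print(v, "->", status)
```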
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-mapsplatformdatasets/testing/constraints-3.12.txt b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-mapsplatformdatasets/testing/constraints-3.7.txt b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-maps-mapsplatformdatasets/testing/constraints-3.8.txt b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-mapsplatformdatasets/testing/constraints-3.9.txt b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-maps-mapsplatformdatasets/tests/__init__.py b/packages/google-maps-mapsplatformdatasets/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
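As the constraints-3.7 header above explains, each pin should equal the lower bound declared in setup.py. A minimal consistency check might look like the following; the file path and expected pins are taken from this diff, and the script itself is illustrative, not part of the repository:

```python
# Illustrative check, assumed to run from the package root: verify that
# constraints-3.7.txt pins match the setup.py lower bounds shown above.
expected = {
    "google-api-core": "1.34.0",
    "proto-plus": "1.22.0",
    "protobuf": "3.19.5",
}
with open("testing/constraints-3.7.txt") as f:
    pins = dict(
        line.strip().split("==")
        for line in f
        if "==" in line and not line.startswith("#")
    )
assert pins == expected, pins
```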
+# diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/__init__.py b/packages/google-maps-mapsplatformdatasets/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/__init__.py b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/__init__.py b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py new file mode 100644 index 000000000000..95038de98b93 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1/test_maps_platform_datasets.py @@ -0,0 +1,4443 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets import ( + MapsPlatformDatasetsAsyncClient, + MapsPlatformDatasetsClient, + pagers, + transports, +) +from google.maps.mapsplatformdatasets_v1.types import data_source +from google.maps.mapsplatformdatasets_v1.types import dataset +from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1.types import maps_platform_datasets + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MapsPlatformDatasetsClient._get_default_mtls_endpoint(None) is None + assert ( + MapsPlatformDatasetsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MapsPlatformDatasetsClient, "grpc"), + (MapsPlatformDatasetsAsyncClient, "grpc_asyncio"), + (MapsPlatformDatasetsClient, "rest"), + ], +) +def test_maps_platform_datasets_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MapsPlatformDatasetsGrpcTransport, "grpc"), + (transports.MapsPlatformDatasetsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MapsPlatformDatasetsRestTransport, "rest"), + ], +) +def test_maps_platform_datasets_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MapsPlatformDatasetsClient, "grpc"), + (MapsPlatformDatasetsAsyncClient, "grpc_asyncio"), + (MapsPlatformDatasetsClient, "rest"), + ], +) +def test_maps_platform_datasets_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds 
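For reference, here is a sketch that reproduces the endpoint mangling `test__get_default_mtls_endpoint` pins down; it mirrors the behavior the assertions require rather than quoting the library source:

```python
import re

def sketch_default_mtls_endpoint(api_endpoint):
    """Behavior implied by the assertions in the test above (a sketch)."""
    if not api_endpoint:
        return api_endpoint
    m = re.match(
        r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<domain>\.googleapis\.com)?",
        api_endpoint,
    )
    if m.group("mtls") or not m.group("domain"):
        return api_endpoint  # already an mtls host, or not a googleapis host
    if m.group("sandbox"):
        return api_endpoint.replace("sandbox.googleapis.com", "mtls.sandbox.googleapis.com")
    return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

assert sketch_default_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert sketch_default_mtls_endpoint("example.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
assert sketch_default_mtls_endpoint("api.example.com") == "api.example.com"
```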
+ assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com" + ) + + +def test_maps_platform_datasets_client_get_transport_class(): + transport = MapsPlatformDatasetsClient.get_transport_class() + available_transports = [ + transports.MapsPlatformDatasetsGrpcTransport, + transports.MapsPlatformDatasetsRestTransport, + ] + assert transport in available_transports + + transport = MapsPlatformDatasetsClient.get_transport_class("grpc") + assert transport == transports.MapsPlatformDatasetsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsGrpcTransport, + "grpc", + ), + ( + MapsPlatformDatasetsAsyncClient, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + MapsPlatformDatasetsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsClient), +) +@mock.patch.object( + MapsPlatformDatasetsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsAsyncClient), +) +def test_maps_platform_datasets_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MapsPlatformDatasetsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MapsPlatformDatasetsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
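A hedged usage sketch of the two factory methods exercised above; the credentials path is a placeholder, not a real key file:

```python
from google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets import (
    MapsPlatformDatasetsClient,
)

# Construct a client from a service account key file.
client = MapsPlatformDatasetsClient.from_service_account_file("service-account.json")
# from_service_account_json behaves identically, as the test asserts.
client = MapsPlatformDatasetsClient.from_service_account_json("service-account.json")
```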
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsGrpcTransport, + "grpc", + "true", + ), + ( + MapsPlatformDatasetsAsyncClient, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsGrpcTransport, + "grpc", + "false", + ), + ( + MapsPlatformDatasetsAsyncClient, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsRestTransport, + "rest", + "true", + ), + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + MapsPlatformDatasetsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsClient), +) +@mock.patch.object( + MapsPlatformDatasetsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_maps_platform_datasets_client_mtls_env_auto( + client_class, transport_class, transport_name, 
use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [MapsPlatformDatasetsClient, MapsPlatformDatasetsAsyncClient] +) +@mock.patch.object( + MapsPlatformDatasetsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsClient), +) +@mock.patch.object( + MapsPlatformDatasetsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsAsyncClient), +) +def test_maps_platform_datasets_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
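The environment-driven cases above collapse into a small decision rule. The following sketch (not library code) summarizes what the assertions require; note that an explicit api_endpoint in client_options overrides the endpoint outcome, as the first two cases of the test show:

```python
def sketch_resolution(use_mtls_env, use_client_cert_env, cert_source, default_cert_exists):
    """Condensed form of the mtls endpoint/cert decision the tests assert."""
    cert = None
    if use_client_cert_env == "true":
        cert = cert_source or ("default cert" if default_cert_exists else None)
    if use_mtls_env == "always" or (use_mtls_env == "auto" and cert):
        return "DEFAULT_MTLS_ENDPOINT", cert
    return "DEFAULT_ENDPOINT", cert

# "never" pins the regular endpoint; "always" forces mtls even without a cert.
assert sketch_resolution("never", "false", None, False) == ("DEFAULT_ENDPOINT", None)
assert sketch_resolution("always", "false", None, False) == ("DEFAULT_MTLS_ENDPOINT", None)
assert sketch_resolution("auto", "true", None, True)[0] == "DEFAULT_MTLS_ENDPOINT"
```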
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsGrpcTransport, + "grpc", + ), + ( + MapsPlatformDatasetsAsyncClient, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsRestTransport, + "rest", + ), + ], +) +def test_maps_platform_datasets_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MapsPlatformDatasetsAsyncClient, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsRestTransport, + "rest", + None, + ), + ], +) +def test_maps_platform_datasets_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_maps_platform_datasets_client_client_options_from_dict(): + with mock.patch( + "google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.transports.MapsPlatformDatasetsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MapsPlatformDatasetsClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MapsPlatformDatasetsClient, + transports.MapsPlatformDatasetsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MapsPlatformDatasetsAsyncClient, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_maps_platform_datasets_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "mapsplatformdatasets.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="mapsplatformdatasets.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.CreateDatasetRequest, + dict, + ], +) +def test_create_dataset(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + response = client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.CreateDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +def test_create_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + client.create_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.CreateDatasetRequest() + + +@pytest.mark.asyncio +async def test_create_dataset_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.CreateDatasetRequest, +): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + ) + ) + response = await client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.CreateDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +@pytest.mark.asyncio +async def test_create_dataset_async_from_dict(): + await test_create_dataset_async(request_type=dict) + + +def test_create_dataset_field_headers(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.CreateDatasetRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + call.return_value = gmm_dataset.Dataset() + client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dataset_field_headers_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.CreateDatasetRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) + await client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_dataset_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dataset( + parent="parent_value", + dataset=gmm_dataset.Dataset(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].dataset + mock_val = gmm_dataset.Dataset(name="name_value") + assert arg == mock_val + + +def test_create_dataset_flattened_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dataset( + maps_platform_datasets.CreateDatasetRequest(), + parent="parent_value", + dataset=gmm_dataset.Dataset(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_dataset_flattened_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dataset( + parent="parent_value", + dataset=gmm_dataset.Dataset(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].dataset + mock_val = gmm_dataset.Dataset(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_dataset_flattened_error_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
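For context, a hedged sketch of the two calling conventions the create_dataset tests distinguish; the project and dataset values are invented, and mixing the two styles raises ValueError, which is exactly what the `*_flattened_error` tests assert:

```python
from google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets import (
    MapsPlatformDatasetsClient,
)
from google.maps.mapsplatformdatasets_v1.types import dataset as gmm_dataset

client = MapsPlatformDatasetsClient()  # real credentials needed in practice

# Request-object style (the form the fixup script converts code to):
created = client.create_dataset(
    request={"parent": "projects/my-project", "dataset": {"display_name": "demo"}}
)

# Flattened style; passing a request object AND flattened fields together
# is an error.
created = client.create_dataset(
    parent="projects/my-project",
    dataset=gmm_dataset.Dataset(display_name="demo"),
)
```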
+ with pytest.raises(ValueError): + await client.create_dataset( + maps_platform_datasets.CreateDatasetRequest(), + parent="parent_value", + dataset=gmm_dataset.Dataset(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.UpdateDatasetMetadataRequest, + dict, + ], +) +def test_update_dataset_metadata(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + response = client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +def test_update_dataset_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + client.update_dataset_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + ) + ) + response = await client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_async_from_dict(): + await test_update_dataset_metadata_async(request_type=dict) + + +def test_update_dataset_metadata_field_headers(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.UpdateDatasetMetadataRequest() + + request.dataset.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + call.return_value = gmm_dataset.Dataset() + client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_field_headers_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.UpdateDatasetMetadataRequest() + + request.dataset.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) + await client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset.name=name_value", + ) in kw["metadata"] + + +def test_update_dataset_metadata_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
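The field-header assertions encode a simple routing rule: the value of the request field named in the HTTP binding is placed, URL-encoded, into the `x-goog-request-params` metadata entry. A sketch of that rule (the real helper lives in google.api_core; this is only an illustration):

```python
from urllib.parse import quote

def sketch_routing_header(field, value):
    # Mirrors the metadata tuple the field-header tests look for.
    return ("x-goog-request-params", "{}={}".format(field, quote(value)))

assert sketch_routing_header("dataset.name", "name_value") == (
    "x-goog-request-params",
    "dataset.name=name_value",
)
```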
+ with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_dataset_metadata( + dataset=gmm_dataset.Dataset(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = gmm_dataset.Dataset(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_dataset_metadata_flattened_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_dataset_metadata( + maps_platform_datasets.UpdateDatasetMetadataRequest(), + dataset=gmm_dataset.Dataset(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_flattened_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_dataset_metadata( + dataset=gmm_dataset.Dataset(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = gmm_dataset.Dataset(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_flattened_error_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_dataset_metadata( + maps_platform_datasets.UpdateDatasetMetadataRequest(), + dataset=gmm_dataset.Dataset(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.GetDatasetRequest, + dict, + ], +) +def test_get_dataset(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + response = client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.GetDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +def test_get_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + client.get_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.GetDatasetRequest() + + +@pytest.mark.asyncio +async def test_get_dataset_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.GetDatasetRequest, +): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + ) + ) + response = await client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.GetDatasetRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +@pytest.mark.asyncio +async def test_get_dataset_async_from_dict(): + await test_get_dataset_async(request_type=dict) + + +def test_get_dataset_field_headers(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.GetDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + call.return_value = dataset.Dataset() + client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dataset_field_headers_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.GetDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) + await client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_dataset_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_dataset_flattened_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dataset( + maps_platform_datasets.GetDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_dataset_flattened_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_dataset_flattened_error_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dataset( + maps_platform_datasets.GetDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.ListDatasetsRequest, + dict, + ], +) +def test_list_datasets(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = maps_platform_datasets.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.ListDatasetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatasetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_datasets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + client.list_datasets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.ListDatasetsRequest() + + +@pytest.mark.asyncio +async def test_list_datasets_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.ListDatasetsRequest, +): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + maps_platform_datasets.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.ListDatasetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatasetsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_datasets_async_from_dict(): + await test_list_datasets_async(request_type=dict) + + +def test_list_datasets_field_headers(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.ListDatasetsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + call.return_value = maps_platform_datasets.ListDatasetsResponse() + client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_datasets_field_headers_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.ListDatasetsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + maps_platform_datasets.ListDatasetsResponse() + ) + await client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_datasets_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = maps_platform_datasets.ListDatasetsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_datasets( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_datasets_flattened_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_datasets( + maps_platform_datasets.ListDatasetsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_datasets_flattened_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = maps_platform_datasets.ListDatasetsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + maps_platform_datasets.ListDatasetsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_datasets( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_datasets_flattened_error_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_datasets( + maps_platform_datasets.ListDatasetsRequest(), + parent="parent_value", + ) + + +def test_list_datasets_pager(transport_name: str = "grpc"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_datasets(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dataset.Dataset) for i in results) + + +def test_list_datasets_pages(transport_name: str = "grpc"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + RuntimeError, + ) + pages = list(client.list_datasets(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_datasets_async_pager(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_datasets( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dataset.Dataset) for i in responses) + + +@pytest.mark.asyncio +async def test_list_datasets_async_pages(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
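+        # (Same four-page fixture as the sync pager tests above; the async
+        # pager is expected to stop at the page whose next_page_token is empty.)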
+ call.side_effect = ( + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_datasets(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.DeleteDatasetRequest, + dict, + ], +) +def test_delete_dataset(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + client.delete_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + + +@pytest.mark.asyncio +async def test_delete_dataset_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.DeleteDatasetRequest, +): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + + # Establish that the response is the type that we expect. 
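+    # (DeleteDataset returns google.protobuf.Empty, which the generated
+    # client surfaces to callers as None.)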
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_dataset_async_from_dict(): + await test_delete_dataset_async(request_type=dict) + + +def test_delete_dataset_field_headers(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.DeleteDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value = None + client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dataset_field_headers_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.DeleteDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_dataset_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_dataset_flattened_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dataset( + maps_platform_datasets.DeleteDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dataset_flattened_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. 
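+        # (For the async client the mocked stub must yield an awaitable,
+        # hence the FakeUnaryUnaryCall wrapper below.)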
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_dataset_flattened_error_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_dataset( + maps_platform_datasets.DeleteDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.CreateDatasetRequest, + dict, + ], +) +def test_create_dataset_rest(request_type): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["dataset"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "version_id": "version_id_value", + "usage": [1], + "local_file_source": {"filename": "filename_value", "file_format": 1}, + "gcs_source": {"input_uri": "input_uri_value", "file_format": 1}, + "status": {"state": 1, "error_message": "error_message_value"}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "version_create_time": {}, + "version_description": "version_description_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dataset(request) + + # Establish that the response is the type that we expect. 
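+    # (The mocked session returned the Dataset serialized via MessageToJson,
+    # so each field below should deserialize back to the value set above.)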
+ assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +def test_create_dataset_rest_required_fields( + request_type=maps_platform_datasets.CreateDatasetRequest, +): + transport_class = transports.MapsPlatformDatasetsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
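+            # (Stubbing transcode keeps this test independent of the real
+            # http_options; "v1/sample_method" is a placeholder URI, not the
+            # service's actual route.)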
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_dataset_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_dataset._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "dataset", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dataset_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsRestInterceptor(), + ) + client = MapsPlatformDatasetsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsRestInterceptor, "post_create_dataset" + ) as post, mock.patch.object( + transports.MapsPlatformDatasetsRestInterceptor, "pre_create_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = maps_platform_datasets.CreateDatasetRequest.pb( + maps_platform_datasets.CreateDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gmm_dataset.Dataset.to_json(gmm_dataset.Dataset()) + + request = maps_platform_datasets.CreateDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gmm_dataset.Dataset() + + client.create_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dataset_rest_bad_request( + transport: str = "rest", request_type=maps_platform_datasets.CreateDatasetRequest +): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["dataset"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "version_id": "version_id_value", + "usage": [1], + "local_file_source": {"filename": "filename_value", "file_format": 1}, + "gcs_source": {"input_uri": "input_uri_value", "file_format": 1}, + "status": {"state": 1, "error_message": "error_message_value"}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "version_create_time": {}, + "version_description": 
"version_description_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dataset(request) + + +def test_create_dataset_rest_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + dataset=gmm_dataset.Dataset(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/datasets" % client.transport._host, args[1] + ) + + +def test_create_dataset_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dataset( + maps_platform_datasets.CreateDatasetRequest(), + parent="parent_value", + dataset=gmm_dataset.Dataset(name="name_value"), + ) + + +def test_create_dataset_rest_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.UpdateDatasetMetadataRequest, + dict, + ], +) +def test_update_dataset_metadata_rest(request_type): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"dataset": {"name": "projects/sample1/datasets/sample2"}} + request_init["dataset"] = { + "name": "projects/sample1/datasets/sample2", + "display_name": "display_name_value", + "description": "description_value", + "version_id": "version_id_value", + "usage": [1], + "local_file_source": {"filename": "filename_value", "file_format": 1}, + "gcs_source": {"input_uri": "input_uri_value", "file_format": 1}, + "status": {"state": 1, "error_message": "error_message_value"}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "version_create_time": {}, + "version_description": "version_description_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dataset_metadata(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +def test_update_dataset_metadata_rest_required_fields( + request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +): + transport_class = transports.MapsPlatformDatasetsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dataset_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dataset_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_dataset_metadata(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_dataset_metadata_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_dataset_metadata._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("dataset",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_dataset_metadata_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsRestInterceptor(), + ) + client = MapsPlatformDatasetsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsRestInterceptor, "post_update_dataset_metadata" + ) as post, mock.patch.object( + transports.MapsPlatformDatasetsRestInterceptor, "pre_update_dataset_metadata" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = maps_platform_datasets.UpdateDatasetMetadataRequest.pb( + maps_platform_datasets.UpdateDatasetMetadataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gmm_dataset.Dataset.to_json(gmm_dataset.Dataset()) + + request = maps_platform_datasets.UpdateDatasetMetadataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gmm_dataset.Dataset() + + client.update_dataset_metadata( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_dataset_metadata_rest_bad_request( + transport: str = "rest", + request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"dataset": {"name": "projects/sample1/datasets/sample2"}} + request_init["dataset"] = { + "name": "projects/sample1/datasets/sample2", + "display_name": "display_name_value", + "description": "description_value", + "version_id": "version_id_value", + "usage": [1], + "local_file_source": {"filename": "filename_value", "file_format": 1}, + "gcs_source": {"input_uri": "input_uri_value", "file_format": 1}, + "status": {"state": 1, "error_message": 
"error_message_value"}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "version_create_time": {}, + "version_description": "version_description_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_dataset_metadata(request) + + +def test_update_dataset_metadata_rest_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset() + + # get arguments that satisfy an http rule for this method + sample_request = {"dataset": {"name": "projects/sample1/datasets/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + dataset=gmm_dataset.Dataset(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_dataset_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{dataset.name=projects/*/datasets/*}" % client.transport._host, + args[1], + ) + + +def test_update_dataset_metadata_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_dataset_metadata( + maps_platform_datasets.UpdateDatasetMetadataRequest(), + dataset=gmm_dataset.Dataset(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_dataset_metadata_rest_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.GetDatasetRequest, + dict, + ], +) +def test_get_dataset_rest(request_type): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dataset(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.version_description == "version_description_value" + + +def test_get_dataset_rest_required_fields( + request_type=maps_platform_datasets.GetDatasetRequest, +): + transport_class = transports.MapsPlatformDatasetsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dataset.Dataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dataset_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dataset_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsRestInterceptor(), + ) + client = MapsPlatformDatasetsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsRestInterceptor, "post_get_dataset" + ) as post, mock.patch.object( + transports.MapsPlatformDatasetsRestInterceptor, "pre_get_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = maps_platform_datasets.GetDatasetRequest.pb( + maps_platform_datasets.GetDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dataset.Dataset.to_json(dataset.Dataset()) + + request = maps_platform_datasets.GetDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dataset.Dataset() + + client.get_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dataset_rest_bad_request( + transport: str = "rest", request_type=maps_platform_datasets.GetDatasetRequest +): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
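+    # (google-api-core maps an HTTP 400 response onto
+    # core_exceptions.BadRequest, which pytest.raises captures below.)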
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dataset(request) + + +def test_get_dataset_rest_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dataset.Dataset() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/datasets/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/datasets/*}" % client.transport._host, args[1] + ) + + +def test_get_dataset_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dataset( + maps_platform_datasets.GetDatasetRequest(), + name="name_value", + ) + + +def test_get_dataset_rest_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.ListDatasetsRequest, + dict, + ], +) +def test_list_datasets_rest(request_type): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = maps_platform_datasets.ListDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_datasets(request) + + # Establish that the response is the type that we expect. 
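+    # (Even over REST the method returns a pager wrapping the raw response,
+    # so pagination behaves the same as with the gRPC transport.)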
+ assert isinstance(response, pagers.ListDatasetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_datasets_rest_required_fields( + request_type=maps_platform_datasets.ListDatasetsRequest, +): + transport_class = transports.MapsPlatformDatasetsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_datasets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_datasets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = maps_platform_datasets.ListDatasetsResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_datasets(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_datasets_rest_unset_required_fields():
+    transport = transports.MapsPlatformDatasetsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_datasets._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_datasets_rest_interceptors(null_interceptor):
+    transport = transports.MapsPlatformDatasetsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MapsPlatformDatasetsRestInterceptor(),
+    )
+    client = MapsPlatformDatasetsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MapsPlatformDatasetsRestInterceptor, "post_list_datasets"
+    ) as post, mock.patch.object(
+        transports.MapsPlatformDatasetsRestInterceptor, "pre_list_datasets"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = maps_platform_datasets.ListDatasetsRequest.pb(
+            maps_platform_datasets.ListDatasetsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = maps_platform_datasets.ListDatasetsResponse.to_json(
+            maps_platform_datasets.ListDatasetsResponse()
+        )
+
+        request = maps_platform_datasets.ListDatasetsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = maps_platform_datasets.ListDatasetsResponse()
+
+        client.list_datasets(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_datasets_rest_bad_request(
+    transport: str = "rest", request_type=maps_platform_datasets.ListDatasetsRequest
+):
+    client = MapsPlatformDatasetsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
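+    # (google.api_core translates the mocked 400 status below into
+    # core_exceptions.BadRequest.)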
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_datasets(request) + + +def test_list_datasets_rest_flattened(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = maps_platform_datasets.ListDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_datasets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/datasets" % client.transport._host, args[1] + ) + + +def test_list_datasets_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_datasets( + maps_platform_datasets.ListDatasetsRequest(), + parent="parent_value", + ) + + +def test_list_datasets_rest_pager(transport: str = "rest"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + maps_platform_datasets.ListDatasetsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_datasets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dataset.Dataset) for i in results) + + pages = list(client.list_datasets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.DeleteDatasetRequest, + dict, + ], +) +def test_delete_dataset_rest(request_type): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dataset(request) + + # Establish that the response is the type that we expect. 
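+    # (delete_dataset has no response body, mimicked by the empty JSON payload
+    # above, so the client surfaces None rather than a message.)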
+ assert response is None + + +def test_delete_dataset_rest_required_fields( + request_type=maps_platform_datasets.DeleteDatasetRequest, +): + transport_class = transports.MapsPlatformDatasetsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ""
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_dataset(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_dataset_rest_unset_required_fields():
+    transport = transports.MapsPlatformDatasetsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_dataset._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_dataset_rest_interceptors(null_interceptor):
+    transport = transports.MapsPlatformDatasetsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MapsPlatformDatasetsRestInterceptor(),
+    )
+    client = MapsPlatformDatasetsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MapsPlatformDatasetsRestInterceptor, "pre_delete_dataset"
+    ) as pre:
+        pre.assert_not_called()
+        pb_message = maps_platform_datasets.DeleteDatasetRequest.pb(
+            maps_platform_datasets.DeleteDatasetRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = maps_platform_datasets.DeleteDatasetRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_dataset(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+
+
+def test_delete_dataset_rest_bad_request(
+    transport: str = "rest", request_type=maps_platform_datasets.DeleteDatasetRequest
+):
+    client = MapsPlatformDatasetsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/datasets/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_dataset(request)
+
+
+def test_delete_dataset_rest_flattened():
+    client = MapsPlatformDatasetsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/datasets/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/datasets/*}" % client.transport._host, args[1] + ) + + +def test_delete_dataset_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dataset( + maps_platform_datasets.DeleteDatasetRequest(), + name="name_value", + ) + + +def test_delete_dataset_rest_error(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MapsPlatformDatasetsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MapsPlatformDatasetsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MapsPlatformDatasetsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MapsPlatformDatasetsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MapsPlatformDatasetsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MapsPlatformDatasetsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MapsPlatformDatasetsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MapsPlatformDatasetsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MapsPlatformDatasetsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MapsPlatformDatasetsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MapsPlatformDatasetsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MapsPlatformDatasetsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsGrpcTransport, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + transports.MapsPlatformDatasetsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MapsPlatformDatasetsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MapsPlatformDatasetsGrpcTransport, + ) + + +def test_maps_platform_datasets_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MapsPlatformDatasetsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_maps_platform_datasets_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.transports.MapsPlatformDatasetsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MapsPlatformDatasetsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
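+    # (Concrete gRPC/REST transports override these stubs with real calls.)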
+ methods = ( + "create_dataset", + "update_dataset_metadata", + "get_dataset", + "list_datasets", + "delete_dataset", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_maps_platform_datasets_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.transports.MapsPlatformDatasetsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MapsPlatformDatasetsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_maps_platform_datasets_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.maps.mapsplatformdatasets_v1.services.maps_platform_datasets.transports.MapsPlatformDatasetsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MapsPlatformDatasetsTransport() + adc.assert_called_once() + + +def test_maps_platform_datasets_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MapsPlatformDatasetsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsGrpcTransport, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + ], +) +def test_maps_platform_datasets_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
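+    # (google.auth.default is patched so no real ADC lookup takes place.)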
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsGrpcTransport, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + transports.MapsPlatformDatasetsRestTransport, + ], +) +def test_maps_platform_datasets_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MapsPlatformDatasetsGrpcTransport, grpc_helpers), + (transports.MapsPlatformDatasetsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_maps_platform_datasets_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "mapsplatformdatasets.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="mapsplatformdatasets.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsGrpcTransport, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + ], +) +def test_maps_platform_datasets_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
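+    # (grpc.ssl_channel_credentials is patched below to capture the cert/key
+    # pair produced by the callback.)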
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_maps_platform_datasets_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MapsPlatformDatasetsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_maps_platform_datasets_host_no_port(transport_name): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="mapsplatformdatasets.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_maps_platform_datasets_host_with_port(transport_name): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="mapsplatformdatasets.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_maps_platform_datasets_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MapsPlatformDatasetsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MapsPlatformDatasetsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_dataset._session + session2 = client2.transport.create_dataset._session + assert session1 != session2 + session1 = client1.transport.update_dataset_metadata._session + session2 = client2.transport.update_dataset_metadata._session + assert session1 != session2 + session1 = client1.transport.get_dataset._session + session2 = client2.transport.get_dataset._session + assert session1 != session2 + session1 = client1.transport.list_datasets._session + session2 = client2.transport.list_datasets._session + assert session1 != session2 + session1 = client1.transport.delete_dataset._session + session2 = client2.transport.delete_dataset._session + assert session1 != session2 + + +def test_maps_platform_datasets_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.MapsPlatformDatasetsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_maps_platform_datasets_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MapsPlatformDatasetsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MapsPlatformDatasetsGrpcTransport,
+        transports.MapsPlatformDatasetsGrpcAsyncIOTransport,
+    ],
+)
+def test_maps_platform_datasets_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsGrpcTransport, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + ], +) +def test_maps_platform_datasets_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_dataset_path(): + project = "squid" + dataset = "clam" + expected = "projects/{project}/datasets/{dataset}".format( + project=project, + dataset=dataset, + ) + actual = MapsPlatformDatasetsClient.dataset_path(project, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "whelk", + "dataset": "octopus", + } + path = MapsPlatformDatasetsClient.dataset_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsClient.parse_dataset_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MapsPlatformDatasetsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MapsPlatformDatasetsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MapsPlatformDatasetsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MapsPlatformDatasetsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MapsPlatformDatasetsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MapsPlatformDatasetsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MapsPlatformDatasetsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = MapsPlatformDatasetsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MapsPlatformDatasetsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MapsPlatformDatasetsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MapsPlatformDatasetsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MapsPlatformDatasetsTransport, "_prep_wrapped_messages" + ) as prep: + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MapsPlatformDatasetsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MapsPlatformDatasetsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MapsPlatformDatasetsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MapsPlatformDatasetsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
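+        # (Exiting the client context manager must close the transport.)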
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MapsPlatformDatasetsClient, transports.MapsPlatformDatasetsGrpcTransport), + ( + MapsPlatformDatasetsAsyncClient, + transports.MapsPlatformDatasetsGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/__init__.py b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/test_maps_platform_datasets_v1_alpha.py b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/test_maps_platform_datasets_v1_alpha.py new file mode 100644 index 000000000000..1350b48965d6 --- /dev/null +++ b/packages/google-maps-mapsplatformdatasets/tests/unit/gapic/mapsplatformdatasets_v1alpha/test_maps_platform_datasets_v1_alpha.py @@ -0,0 +1,5785 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha import ( + MapsPlatformDatasetsV1AlphaAsyncClient, + MapsPlatformDatasetsV1AlphaClient, + pagers, + transports, +) +from google.maps.mapsplatformdatasets_v1alpha.types import dataset as gmm_dataset +from google.maps.mapsplatformdatasets_v1alpha.types import maps_platform_datasets +from google.maps.mapsplatformdatasets_v1alpha.types import data_source +from google.maps.mapsplatformdatasets_v1alpha.types import dataset + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(None) is None + assert ( + MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + MapsPlatformDatasetsV1AlphaClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MapsPlatformDatasetsV1AlphaClient, "grpc"), + (MapsPlatformDatasetsV1AlphaAsyncClient, "grpc_asyncio"), + (MapsPlatformDatasetsV1AlphaClient, "rest"), + ], +) +def test_maps_platform_datasets_v1_alpha_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MapsPlatformDatasetsV1AlphaGrpcTransport, "grpc"), + (transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MapsPlatformDatasetsV1AlphaRestTransport, "rest"), + ], +) +def test_maps_platform_datasets_v1_alpha_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MapsPlatformDatasetsV1AlphaClient, "grpc"), + (MapsPlatformDatasetsV1AlphaAsyncClient, "grpc_asyncio"), + (MapsPlatformDatasetsV1AlphaClient, "rest"), + ], +) +def test_maps_platform_datasets_v1_alpha_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com" + ) + + +def test_maps_platform_datasets_v1_alpha_client_get_transport_class(): + transport = MapsPlatformDatasetsV1AlphaClient.get_transport_class() + available_transports = [ + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + ] + assert transport in available_transports + + transport = MapsPlatformDatasetsV1AlphaClient.get_transport_class("grpc") + assert transport == transports.MapsPlatformDatasetsV1AlphaGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + "grpc", + ), + ( + MapsPlatformDatasetsV1AlphaAsyncClient, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + MapsPlatformDatasetsV1AlphaClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsV1AlphaClient), +) +@mock.patch.object( + MapsPlatformDatasetsV1AlphaAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsV1AlphaAsyncClient), +) +def test_maps_platform_datasets_v1_alpha_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + MapsPlatformDatasetsV1AlphaClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + MapsPlatformDatasetsV1AlphaClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
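+    # ("never" pins the client to the plain endpoint regardless of certificates.)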
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + "grpc", + "true", + ), + ( + MapsPlatformDatasetsV1AlphaAsyncClient, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + "grpc", + "false", + ), + ( + MapsPlatformDatasetsV1AlphaAsyncClient, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + 
MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + "rest", + "true", + ), + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + MapsPlatformDatasetsV1AlphaClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsV1AlphaClient), +) +@mock.patch.object( + MapsPlatformDatasetsV1AlphaAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsV1AlphaAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_maps_platform_datasets_v1_alpha_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
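+    # (With no client certificate available, the plain endpoint is expected.)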
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [MapsPlatformDatasetsV1AlphaClient, MapsPlatformDatasetsV1AlphaAsyncClient], +) +@mock.patch.object( + MapsPlatformDatasetsV1AlphaClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsV1AlphaClient), +) +@mock.patch.object( + MapsPlatformDatasetsV1AlphaAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MapsPlatformDatasetsV1AlphaAsyncClient), +) +def test_maps_platform_datasets_v1_alpha_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
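+    # (Under "auto", a discoverable default cert switches the client to the
+    # mTLS endpoint.)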
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + "grpc", + ), + ( + MapsPlatformDatasetsV1AlphaAsyncClient, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + "rest", + ), + ], +) +def test_maps_platform_datasets_v1_alpha_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MapsPlatformDatasetsV1AlphaAsyncClient, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + "rest", + None, + ), + ], +) +def test_maps_platform_datasets_v1_alpha_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
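+    # The path is handed to the transport verbatim; nothing is loaded here,
+    # so the credentials argument itself is expected to stay None.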
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_maps_platform_datasets_v1_alpha_client_client_options_from_dict(): + with mock.patch( + "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MapsPlatformDatasetsV1AlphaClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MapsPlatformDatasetsV1AlphaAsyncClient, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_maps_platform_datasets_v1_alpha_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
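+    # Mock out credential loading so we can check that the channel is built
+    # with the credentials loaded from the file, not the ADC credentials.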
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "mapsplatformdatasets.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="mapsplatformdatasets.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.CreateDatasetRequest, + dict, + ], +) +def test_create_dataset(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=gmm_dataset.State.STATE_IMPORTING, + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + response = client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.CreateDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == gmm_dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +def test_create_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
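+    # Even with no arguments at all, the client should construct and send a
+    # default CreateDatasetRequest.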
+ with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + client.create_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.CreateDatasetRequest() + + +@pytest.mark.asyncio +async def test_create_dataset_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.CreateDatasetRequest, +): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=gmm_dataset.State.STATE_IMPORTING, + version_description="version_description_value", + ) + ) + response = await client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.CreateDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == gmm_dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +@pytest.mark.asyncio +async def test_create_dataset_async_from_dict(): + await test_create_dataset_async(request_type=dict) + + +def test_create_dataset_field_headers(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.CreateDatasetRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: + call.return_value = gmm_dataset.Dataset() + client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dataset_field_headers_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
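+    # The routing information is mirrored into the x-goog-request-params
+    # metadata entry so the backend can route the request.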
+    request = maps_platform_datasets.CreateDatasetRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_dataset), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset())
+        await client.create_dataset(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_dataset_flattened():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_dataset), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gmm_dataset.Dataset()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_dataset(
+            parent="parent_value",
+            dataset=gmm_dataset.Dataset(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].dataset
+        mock_val = gmm_dataset.Dataset(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_dataset_flattened_error():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_dataset(
+            maps_platform_datasets.CreateDatasetRequest(),
+            parent="parent_value",
+            dataset=gmm_dataset.Dataset(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_dataset_flattened_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_dataset), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_dataset(
+            parent="parent_value",
+            dataset=gmm_dataset.Dataset(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].dataset
+        mock_val = gmm_dataset.Dataset(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_dataset_flattened_error_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_dataset( + maps_platform_datasets.CreateDatasetRequest(), + parent="parent_value", + dataset=gmm_dataset.Dataset(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.UpdateDatasetMetadataRequest, + dict, + ], +) +def test_update_dataset_metadata(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=gmm_dataset.State.STATE_IMPORTING, + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + response = client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == gmm_dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +def test_update_dataset_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + client.update_dataset_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
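+        # FakeUnaryUnaryCall wraps the response so that the mocked RPC can be
+        # awaited like a real async gRPC call.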
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=gmm_dataset.State.STATE_IMPORTING, + version_description="version_description_value", + ) + ) + response = await client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.UpdateDatasetMetadataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == gmm_dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_async_from_dict(): + await test_update_dataset_metadata_async(request_type=dict) + + +def test_update_dataset_metadata_field_headers(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.UpdateDatasetMetadataRequest() + + request.dataset.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + call.return_value = gmm_dataset.Dataset() + client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_dataset_metadata_field_headers_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.UpdateDatasetMetadataRequest() + + request.dataset.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dataset_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset()) + await client.update_dataset_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset.name=name_value", + ) in kw["metadata"] + + +def test_update_dataset_metadata_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.update_dataset_metadata), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gmm_dataset.Dataset()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_dataset_metadata(
+            dataset=gmm_dataset.Dataset(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].dataset
+        mock_val = gmm_dataset.Dataset(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_dataset_metadata_flattened_error():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_dataset_metadata(
+            maps_platform_datasets.UpdateDatasetMetadataRequest(),
+            dataset=gmm_dataset.Dataset(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_dataset_metadata_flattened_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_dataset_metadata), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gmm_dataset.Dataset())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_dataset_metadata(
+            dataset=gmm_dataset.Dataset(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].dataset
+        mock_val = gmm_dataset.Dataset(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_dataset_metadata_flattened_error_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_dataset_metadata(
+            maps_platform_datasets.UpdateDatasetMetadataRequest(),
+            dataset=gmm_dataset.Dataset(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        maps_platform_datasets.GetDatasetRequest,
+        dict,
+    ],
+)
+def test_get_dataset(request_type, transport: str = "grpc"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=dataset.State.STATE_IMPORTING, + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + response = client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.GetDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +def test_get_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + client.get_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.GetDatasetRequest() + + +@pytest.mark.asyncio +async def test_get_dataset_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.GetDatasetRequest, +): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=dataset.State.STATE_IMPORTING, + version_description="version_description_value", + ) + ) + response = await client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.GetDatasetRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +@pytest.mark.asyncio +async def test_get_dataset_async_from_dict(): + await test_get_dataset_async(request_type=dict) + + +def test_get_dataset_field_headers(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.GetDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + call.return_value = dataset.Dataset() + client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dataset_field_headers_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.GetDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) + await client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_dataset_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_dataset_flattened_error(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_dataset(
+            maps_platform_datasets.GetDatasetRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_dataset_flattened_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_dataset), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_dataset(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_dataset_flattened_error_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_dataset(
+            maps_platform_datasets.GetDatasetRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        maps_platform_datasets.ListDatasetVersionsRequest,
+        dict,
+    ],
+)
+def test_list_dataset_versions(request_type, transport: str = "grpc"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_dataset_versions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = maps_platform_datasets.ListDatasetVersionsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_dataset_versions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == maps_platform_datasets.ListDatasetVersionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDatasetVersionsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_dataset_versions_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_dataset_versions), "__call__" + ) as call: + client.list_dataset_versions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.ListDatasetVersionsRequest() + + +@pytest.mark.asyncio +async def test_list_dataset_versions_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.ListDatasetVersionsRequest, +): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dataset_versions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + maps_platform_datasets.ListDatasetVersionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_dataset_versions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.ListDatasetVersionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatasetVersionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_dataset_versions_async_from_dict(): + await test_list_dataset_versions_async(request_type=dict) + + +def test_list_dataset_versions_field_headers(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.ListDatasetVersionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dataset_versions), "__call__" + ) as call: + call.return_value = maps_platform_datasets.ListDatasetVersionsResponse() + client.list_dataset_versions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_dataset_versions_field_headers_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.ListDatasetVersionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dataset_versions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + maps_platform_datasets.ListDatasetVersionsResponse() + ) + await client.list_dataset_versions(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_list_dataset_versions_flattened():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_dataset_versions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = maps_platform_datasets.ListDatasetVersionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_dataset_versions(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_list_dataset_versions_flattened_error():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_dataset_versions(
+            maps_platform_datasets.ListDatasetVersionsRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_dataset_versions_flattened_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_dataset_versions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            maps_platform_datasets.ListDatasetVersionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_dataset_versions(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_dataset_versions_flattened_error_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_dataset_versions(
+            maps_platform_datasets.ListDatasetVersionsRequest(),
+            name="name_value",
+        )
+
+
+def test_list_dataset_versions_pager(transport_name: str = "grpc"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_dataset_versions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
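+        # Each page carries the token for the next one; the last page has no
+        # token, and the trailing RuntimeError would only surface if the
+        # pager tried to fetch past the final page.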
+        call.side_effect = (
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+                next_page_token="abc",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[],
+                next_page_token="def",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("name", ""),)),
+        )
+        pager = client.list_dataset_versions(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dataset.Dataset) for i in results)
+
+
+def test_list_dataset_versions_pages(transport_name: str = "grpc"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_dataset_versions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+                next_page_token="abc",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[],
+                next_page_token="def",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_dataset_versions(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_dataset_versions_async_pager():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_dataset_versions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
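+        # new_callable=mock.AsyncMock makes each mocked response awaitable,
+        # matching how the async transport is actually invoked.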
+        call.side_effect = (
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+                next_page_token="abc",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[],
+                next_page_token="def",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_dataset_versions(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dataset.Dataset) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_dataset_versions_async_pages():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_dataset_versions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+                next_page_token="abc",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[],
+                next_page_token="def",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            maps_platform_datasets.ListDatasetVersionsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_dataset_versions(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        maps_platform_datasets.ListDatasetsRequest,
+        dict,
+    ],
+)
+def test_list_datasets(request_type, transport: str = "grpc"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = maps_platform_datasets.ListDatasetsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_datasets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == maps_platform_datasets.ListDatasetsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDatasetsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_datasets_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+ client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + client.list_datasets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.ListDatasetsRequest() + + +@pytest.mark.asyncio +async def test_list_datasets_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.ListDatasetsRequest, +): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + maps_platform_datasets.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.ListDatasetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatasetsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_datasets_async_from_dict(): + await test_list_datasets_async(request_type=dict) + + +def test_list_datasets_field_headers(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.ListDatasetsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + call.return_value = maps_platform_datasets.ListDatasetsResponse() + client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_datasets_field_headers_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.ListDatasetsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + maps_platform_datasets.ListDatasetsResponse() + ) + await client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_datasets_flattened():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = maps_platform_datasets.ListDatasetsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_datasets(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_datasets_flattened_error():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_datasets(
+            maps_platform_datasets.ListDatasetsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_datasets_flattened_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            maps_platform_datasets.ListDatasetsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_datasets(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_datasets_flattened_error_async():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_datasets(
+            maps_platform_datasets.ListDatasetsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_datasets_pager(transport_name: str = "grpc"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+                next_page_token="abc",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[],
+                next_page_token="def",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_datasets(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dataset.Dataset) for i in results)
+
+
+def test_list_datasets_pages(transport_name: str = "grpc"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+                next_page_token="abc",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[],
+                next_page_token="def",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_datasets(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_datasets_async_pager():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+                next_page_token="abc",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[],
+                next_page_token="def",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            maps_platform_datasets.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_datasets(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dataset.Dataset) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_datasets_async_pages():
+    client = MapsPlatformDatasetsV1AlphaAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
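+    # Iterating .pages yields whole responses rather than individual
+    # datasets, so the page tokens can be verified one page at a time.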
+ with mock.patch.object( + type(client.transport.list_datasets), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_datasets(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.DeleteDatasetRequest, + dict, + ], +) +def test_delete_dataset(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + client.delete_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + + +@pytest.mark.asyncio +async def test_delete_dataset_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.DeleteDatasetRequest, +): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. 
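+    # (For readers unfamiliar with mock internals: each entry of
+    # call.mock_calls is a (name, args, kwargs) triple, so the recurring
+    #     _, args, _ = call.mock_calls[0]
+    # idiom below extracts the positional arguments of the first stub
+    # invocation; args[0] is the request proto that was actually sent.)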
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dataset_async_from_dict(): + await test_delete_dataset_async(request_type=dict) + + +def test_delete_dataset_field_headers(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.DeleteDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value = None + client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dataset_field_headers_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.DeleteDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_dataset_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_dataset_flattened_error(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
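+    # (Background, summarized from the GAPIC calling convention rather than
+    # from this diff: a method accepts either a fully-formed request proto
+    # *or* flattened keyword fields, never both, e.g.
+    #     client.delete_dataset(name="name_value")            # ok
+    #     client.delete_dataset(request, name="name_value")   # ValueError
+    # which is exactly the contract this test pins down.)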
+ with pytest.raises(ValueError): + client.delete_dataset( + maps_platform_datasets.DeleteDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dataset_flattened_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_dataset_flattened_error_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_dataset( + maps_platform_datasets.DeleteDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.DeleteDatasetVersionRequest, + dict, + ], +) +def test_delete_dataset_version(request_type, transport: str = "grpc"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dataset_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dataset_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetVersionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dataset_version_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dataset_version), "__call__" + ) as call: + client.delete_dataset_version() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetVersionRequest() + + +@pytest.mark.asyncio +async def test_delete_dataset_version_async( + transport: str = "grpc_asyncio", + request_type=maps_platform_datasets.DeleteDatasetVersionRequest, +): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dataset_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dataset_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == maps_platform_datasets.DeleteDatasetVersionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dataset_version_async_from_dict(): + await test_delete_dataset_version_async(request_type=dict) + + +def test_delete_dataset_version_field_headers(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.DeleteDatasetVersionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dataset_version), "__call__" + ) as call: + call.return_value = None + client.delete_dataset_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dataset_version_field_headers_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = maps_platform_datasets.DeleteDatasetVersionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dataset_version), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dataset_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
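+    # (How the header is built, for context: gapic_v1.routing_header folds
+    # the URI-bound request fields into a single metadata entry, roughly
+    #     ("x-goog-request-params", "name=name_value")
+    # so asserting membership in kw["metadata"] verifies request routing.)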
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_dataset_version_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dataset_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dataset_version( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_dataset_version_flattened_error(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dataset_version( + maps_platform_datasets.DeleteDatasetVersionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dataset_version_flattened_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dataset_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dataset_version( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_dataset_version_flattened_error_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_dataset_version( + maps_platform_datasets.DeleteDatasetVersionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.CreateDatasetRequest, + dict, + ], +) +def test_create_dataset_rest(request_type): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["dataset"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "version_id": "version_id_value", + "usage": [1], + "local_file_source": {"filename": "filename_value", "file_format": 1}, + "gcs_source": {"input_uri": "input_uri_value", "file_format": 1}, + "status": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "version_create_time": {}, + "version_description": "version_description_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=gmm_dataset.State.STATE_IMPORTING, + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dataset(request) + + # Establish that the response is the type that we expect. 
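+    # (The REST fake above follows a fixed recipe: build the expected proto,
+    # serialize it with json_format.MessageToJson, and plant the bytes in
+    # Response._content so the transport's JSON parser reconstructs it; the
+    # field-by-field asserts below then confirm a lossless round trip.)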
+ assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == gmm_dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +def test_create_dataset_rest_required_fields( + request_type=maps_platform_datasets.CreateDatasetRequest, +): + transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
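+            # (transcode() is stubbed with a minimal dict of the shape the
+            # REST transport consumes: "uri", "method", "query_params" and,
+            # for methods with a body, "body". That lets the test steer every
+            # request field into query_params regardless of the real
+            # http_options; the exact dict below is only a stand-in.)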
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gmm_dataset.Dataset.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_dataset(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_dataset_rest_unset_required_fields():
+    transport = transports.MapsPlatformDatasetsV1AlphaRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_dataset._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "dataset",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_dataset_rest_interceptors(null_interceptor):
+    transport = transports.MapsPlatformDatasetsV1AlphaRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(),
+    )
+    client = MapsPlatformDatasetsV1AlphaClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "post_create_dataset"
+    ) as post, mock.patch.object(
+        transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_create_dataset"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = maps_platform_datasets.CreateDatasetRequest.pb(
+            maps_platform_datasets.CreateDatasetRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gmm_dataset.Dataset.to_json(gmm_dataset.Dataset())
+
+        request = maps_platform_datasets.CreateDatasetRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gmm_dataset.Dataset()
+
+        client.create_dataset(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_dataset_rest_bad_request(
+    transport: str = "rest", request_type=maps_platform_datasets.CreateDatasetRequest
+):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request_init["dataset"] = {
+        "name": "name_value",
+        "display_name": "display_name_value",
+        "description": "description_value",
+        "version_id": "version_id_value",
+        "usage": [1],
+        "local_file_source": {"filename": "filename_value", "file_format": 1},
+        "gcs_source": {"input_uri": "input_uri_value", "file_format": 1},
+        "status": 1,
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "version_create_time": {},
+        "version_description": "version_description_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_dataset(request)
+
+
+def test_create_dataset_rest_flattened():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gmm_dataset.Dataset()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            dataset=gmm_dataset.Dataset(name="name_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gmm_dataset.Dataset.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_dataset(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1alpha/{parent=projects/*}/datasets" % client.transport._host, args[1]
+        )
+
+
+def test_create_dataset_rest_flattened_error(transport: str = "rest"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_dataset(
+            maps_platform_datasets.CreateDatasetRequest(),
+            parent="parent_value",
+            dataset=gmm_dataset.Dataset(name="name_value"),
+        )
+
+
+def test_create_dataset_rest_error():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        maps_platform_datasets.UpdateDatasetMetadataRequest,
+        dict,
+    ],
+)
+def test_update_dataset_metadata_rest(request_type):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"dataset": {"name": "projects/sample1/datasets/sample2"}}
+    request_init["dataset"] = {
+        "name": "projects/sample1/datasets/sample2",
+        "display_name": "display_name_value",
+        "description": "description_value",
+        "version_id": "version_id_value",
+        "usage": [1],
+        "local_file_source": {"filename": "filename_value", "file_format": 1},
+        "gcs_source": {"input_uri": "input_uri_value", "file_format": 1},
+        "status": 1,
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "version_create_time": {},
+        "version_description": "version_description_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=gmm_dataset.State.STATE_IMPORTING, + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gmm_dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dataset_metadata(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gmm_dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [gmm_dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == gmm_dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +def test_update_dataset_metadata_rest_required_fields( + request_type=maps_platform_datasets.UpdateDatasetMetadataRequest, +): + transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dataset_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dataset_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gmm_dataset.Dataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gmm_dataset.Dataset.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_dataset_metadata(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_dataset_metadata_rest_unset_required_fields():
+    transport = transports.MapsPlatformDatasetsV1AlphaRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_dataset_metadata._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask",)) & set(("dataset",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_dataset_metadata_rest_interceptors(null_interceptor):
+    transport = transports.MapsPlatformDatasetsV1AlphaRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(),
+    )
+    client = MapsPlatformDatasetsV1AlphaClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.MapsPlatformDatasetsV1AlphaRestInterceptor,
+        "post_update_dataset_metadata",
+    ) as post, mock.patch.object(
+        transports.MapsPlatformDatasetsV1AlphaRestInterceptor,
+        "pre_update_dataset_metadata",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = maps_platform_datasets.UpdateDatasetMetadataRequest.pb(
+            maps_platform_datasets.UpdateDatasetMetadataRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gmm_dataset.Dataset.to_json(gmm_dataset.Dataset())
+
+        request = maps_platform_datasets.UpdateDatasetMetadataRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gmm_dataset.Dataset()
+
+        client.update_dataset_metadata(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_dataset_metadata_rest_bad_request(
+    transport: str = "rest",
+    request_type=maps_platform_datasets.UpdateDatasetMetadataRequest,
+):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"dataset": {"name": "projects/sample1/datasets/sample2"}}
+    request_init["dataset"] = {
+        "name": "projects/sample1/datasets/sample2",
+        "display_name": "display_name_value",
+        "description": "description_value",
+        "version_id": "version_id_value",
+        "usage": [1],
+        "local_file_source": {"filename": "filename_value", "file_format": 1},
+        "gcs_source": {"input_uri": "input_uri_value", "file_format": 1},
+        "status": 1,
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "version_create_time": {},
+        "version_description": "version_description_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_dataset_metadata(request)
+
+
+def test_update_dataset_metadata_rest_flattened():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gmm_dataset.Dataset()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"dataset": {"name": "projects/sample1/datasets/sample2"}}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            dataset=gmm_dataset.Dataset(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gmm_dataset.Dataset.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_dataset_metadata(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1alpha/{dataset.name=projects/*/datasets/*}" % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_dataset_metadata_rest_flattened_error(transport: str = "rest"):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_dataset_metadata(
+            maps_platform_datasets.UpdateDatasetMetadataRequest(),
+            dataset=gmm_dataset.Dataset(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+def test_update_dataset_metadata_rest_error():
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        maps_platform_datasets.GetDatasetRequest,
+        dict,
+    ],
+)
+def test_get_dataset_rest(request_type):
+    client = MapsPlatformDatasetsV1AlphaClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/datasets/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + version_id="version_id_value", + usage=[dataset.Usage.USAGE_DATA_DRIVEN_STYLING], + status=dataset.State.STATE_IMPORTING, + version_description="version_description_value", + local_file_source=data_source.LocalFileSource(filename="filename_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dataset(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.Dataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.version_id == "version_id_value" + assert response.usage == [dataset.Usage.USAGE_DATA_DRIVEN_STYLING] + assert response.status == dataset.State.STATE_IMPORTING + assert response.version_description == "version_description_value" + + +def test_get_dataset_rest_required_fields( + request_type=maps_platform_datasets.GetDatasetRequest, +): + transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dataset._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("published_usage",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dataset.Dataset() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dataset_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(("publishedUsage",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dataset_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + ) + client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "post_get_dataset" + ) as post, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_get_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = maps_platform_datasets.GetDatasetRequest.pb( + maps_platform_datasets.GetDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dataset.Dataset.to_json(dataset.Dataset()) + + request = maps_platform_datasets.GetDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dataset.Dataset() + + client.get_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dataset_rest_bad_request( + transport: str = "rest", request_type=maps_platform_datasets.GetDatasetRequest +): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
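+    # (Error mapping, for context: google.api_core translates HTTP status
+    # codes into typed exceptions, so the mocked 400 below surfaces to the
+    # caller as core_exceptions.BadRequest; no response body is needed.)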
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dataset(request) + + +def test_get_dataset_rest_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dataset.Dataset() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/datasets/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dataset.Dataset.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/datasets/*}" % client.transport._host, args[1] + ) + + +def test_get_dataset_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dataset( + maps_platform_datasets.GetDatasetRequest(), + name="name_value", + ) + + +def test_get_dataset_rest_error(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.ListDatasetVersionsRequest, + dict, + ], +) +def test_list_dataset_versions_rest(request_type): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = maps_platform_datasets.ListDatasetVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dataset_versions(request) + + # Establish that the response is the type that we expect. 
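+    # (list_dataset_versions returns a pager rather than the raw response:
+    # the pager proxies fields such as next_page_token and only issues
+    # follow-up HTTP calls when iterated, e.g.
+    #     for v in client.list_dataset_versions(name="..."): ...
+    # so a single mocked response suffices for this assertion.)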
+ assert isinstance(response, pagers.ListDatasetVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_dataset_versions_rest_required_fields( + request_type=maps_platform_datasets.ListDatasetVersionsRequest, +): + transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dataset_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dataset_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = maps_platform_datasets.ListDatasetVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_dataset_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_dataset_versions_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_dataset_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dataset_versions_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + ) + client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, + "post_list_dataset_versions", + ) as post, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, + "pre_list_dataset_versions", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = maps_platform_datasets.ListDatasetVersionsRequest.pb( + maps_platform_datasets.ListDatasetVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + maps_platform_datasets.ListDatasetVersionsResponse.to_json( + maps_platform_datasets.ListDatasetVersionsResponse() + ) + ) + + request = maps_platform_datasets.ListDatasetVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = maps_platform_datasets.ListDatasetVersionsResponse() + + client.list_dataset_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_dataset_versions_rest_bad_request( + transport: str = "rest", + request_type=maps_platform_datasets.ListDatasetVersionsRequest, +): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dataset_versions(request) + + +def test_list_dataset_versions_rest_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/datasets/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = maps_platform_datasets.ListDatasetVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_dataset_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/datasets/*}:listVersions" + % client.transport._host, + args[1], + ) + + +def test_list_dataset_versions_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dataset_versions( + maps_platform_datasets.ListDatasetVersionsRequest(), + name="name_value", + ) + + +def test_list_dataset_versions_rest_pager(transport: str = "rest"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
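+        # (Strategy sketch for this REST pager test: serialize a sequence of
+        # page protos to JSON, wrap each in its own 200 Response, and feed
+        # them through req.side_effect so each page fetch consumes one fake
+        # HTTP call; the sequence is doubled because the pager is iterated
+        # twice below.)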
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + maps_platform_datasets.ListDatasetVersionsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetVersionsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetVersionsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetVersionsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + maps_platform_datasets.ListDatasetVersionsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"name": "projects/sample1/datasets/sample2"} + + pager = client.list_dataset_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dataset.Dataset) for i in results) + + pages = list(client.list_dataset_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.ListDatasetsRequest, + dict, + ], +) +def test_list_datasets_rest(request_type): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = maps_platform_datasets.ListDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_datasets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDatasetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_datasets_rest_required_fields( + request_type=maps_platform_datasets.ListDatasetsRequest, +): + transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_datasets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_datasets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
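The required-fields tests mock path_template.transcode because the real transcoder matches field values against the http rule's path pattern, which the placeholder values used in these tests would fail. Conceptually, transcoding turns an http rule plus a populated request into a uri, an HTTP verb, and query parameters. A much-reduced, hypothetical stand-in (the real implementation lives in google.api_core.path_template and also routes leftover fields into query_params):

    import re

    def transcode_get(uri_template, **fields):
        """Reduced stand-in for path_template.transcode, GET only."""
        path = uri_template
        for key, value in fields.items():
            # Replace "{key=pattern}" or "{key}" with the field value.
            path = re.sub(r"\{%s(=[^}]*)?\}" % key, value, path)
        return {"uri": path, "method": "get", "query_params": {}}

    result = transcode_get(
        "/v1alpha/{name=projects/*/datasets/*}",
        name="projects/p1/datasets/d1",
    )
    assert result["uri"] == "/v1alpha/projects/p1/datasets/d1"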
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = maps_platform_datasets.ListDatasetsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_datasets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_datasets_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_datasets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_datasets_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + ) + client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "post_list_datasets" + ) as post, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_list_datasets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = maps_platform_datasets.ListDatasetsRequest.pb( + maps_platform_datasets.ListDatasetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = maps_platform_datasets.ListDatasetsResponse.to_json( + maps_platform_datasets.ListDatasetsResponse() + ) + + request = maps_platform_datasets.ListDatasetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = maps_platform_datasets.ListDatasetsResponse() + + client.list_datasets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_datasets_rest_bad_request( + transport: str = "rest", request_type=maps_platform_datasets.ListDatasetsRequest +): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
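The interceptor tests above patch the pre_*/post_* hooks on the REST interceptor class and assert each is invoked exactly once. The contract they pin down: a "pre" hook receives (request, metadata) and returns the possibly rewritten pair before the HTTP call; a "post" hook receives and returns the decoded response. A hypothetical custom interceptor in that shape (illustration only, reusing this test module's imports):

    class LoggingInterceptor(transports.MapsPlatformDatasetsV1AlphaRestInterceptor):
        def pre_list_datasets(self, request, metadata):
            # Runs before the request is sent; may rewrite request/metadata.
            print(f"ListDatasets: {len(metadata)} metadata entries")
            return request, metadata

        def post_list_datasets(self, response):
            # Runs after the response is decoded; may rewrite the response.
            return response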
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_datasets(request) + + +def test_list_datasets_rest_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = maps_platform_datasets.ListDatasetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = maps_platform_datasets.ListDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_datasets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*}/datasets" % client.transport._host, args[1] + ) + + +def test_list_datasets_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_datasets( + maps_platform_datasets.ListDatasetsRequest(), + parent="parent_value", + ) + + +def test_list_datasets_rest_pager(transport: str = "rest"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + dataset.Dataset(), + ], + next_page_token="abc", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[], + next_page_token="def", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + ], + next_page_token="ghi", + ), + maps_platform_datasets.ListDatasetsResponse( + datasets=[ + dataset.Dataset(), + dataset.Dataset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + maps_platform_datasets.ListDatasetsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_datasets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dataset.Dataset) for i in results) + + pages = list(client.list_datasets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.DeleteDatasetRequest, + dict, + ], +) +def test_delete_dataset_rest(request_type): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dataset(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dataset_rest_required_fields( + request_type=maps_platform_datasets.DeleteDatasetRequest, +): + transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dataset_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dataset_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + ) + client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, "pre_delete_dataset" + ) as pre: + pre.assert_not_called() + pb_message = maps_platform_datasets.DeleteDatasetRequest.pb( + maps_platform_datasets.DeleteDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = maps_platform_datasets.DeleteDatasetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dataset( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_dataset_rest_bad_request( + transport: str = "rest", request_type=maps_platform_datasets.DeleteDatasetRequest +): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + 
request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dataset(request) + + +def test_delete_dataset_rest_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/datasets/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dataset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/datasets/*}" % client.transport._host, args[1] + ) + + +def test_delete_dataset_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dataset( + maps_platform_datasets.DeleteDatasetRequest(), + name="name_value", + ) + + +def test_delete_dataset_rest_error(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + maps_platform_datasets.DeleteDatasetVersionRequest, + dict, + ], +) +def test_delete_dataset_version_rest(request_type): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dataset_version(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dataset_version_rest_required_fields( + request_type=maps_platform_datasets.DeleteDatasetVersionRequest, +): + transport_class = transports.MapsPlatformDatasetsV1AlphaRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dataset_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_dataset_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dataset_version_rest_unset_required_fields(): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dataset_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dataset_version_rest_interceptors(null_interceptor): + transport = transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MapsPlatformDatasetsV1AlphaRestInterceptor(), + ) + client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaRestInterceptor, + "pre_delete_dataset_version", + ) as pre: + pre.assert_not_called() + pb_message = maps_platform_datasets.DeleteDatasetVersionRequest.pb( + maps_platform_datasets.DeleteDatasetVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = maps_platform_datasets.DeleteDatasetVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dataset_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_dataset_version_rest_bad_request( + transport: str = "rest", + request_type=maps_platform_datasets.DeleteDatasetVersionRequest, +): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/datasets/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dataset_version(request) + + +def test_delete_dataset_version_rest_flattened(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/datasets/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dataset_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/datasets/*}:deleteVersion" + % client.transport._host, + args[1], + ) + + +def test_delete_dataset_version_rest_flattened_error(transport: str = "rest"): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dataset_version( + maps_platform_datasets.DeleteDatasetVersionRequest(), + name="name_value", + ) + + +def test_delete_dataset_version_rest_error(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MapsPlatformDatasetsV1AlphaClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MapsPlatformDatasetsV1AlphaClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MapsPlatformDatasetsV1AlphaClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MapsPlatformDatasetsV1AlphaClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MapsPlatformDatasetsV1AlphaClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MapsPlatformDatasetsV1AlphaClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + ) + + +def test_maps_platform_datasets_v1_alpha_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MapsPlatformDatasetsV1AlphaTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_maps_platform_datasets_v1_alpha_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MapsPlatformDatasetsV1AlphaTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_dataset", + "update_dataset_metadata", + "get_dataset", + "list_dataset_versions", + "list_datasets", + "delete_dataset", + "delete_dataset_version", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_maps_platform_datasets_v1_alpha_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MapsPlatformDatasetsV1AlphaTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_maps_platform_datasets_v1_alpha_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.maps.mapsplatformdatasets_v1alpha.services.maps_platform_datasets_v1_alpha.transports.MapsPlatformDatasetsV1AlphaTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MapsPlatformDatasetsV1AlphaTransport() + adc.assert_called_once() + + +def test_maps_platform_datasets_v1_alpha_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MapsPlatformDatasetsV1AlphaClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ], +) +def test_maps_platform_datasets_v1_alpha_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
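Several of these tests stub google.auth.default, the Application Default Credentials lookup that every transport falls back to when no credentials are supplied. Its real return shape is the (credentials, project_id) tuple the mocks reproduce, for example (this snippet needs a configured ADC environment to actually run):

    import google.auth

    # Returns (credentials, project_id); the tests fake this pair with
    # (AnonymousCredentials(), None).
    credentials, project_id = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )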
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + transports.MapsPlatformDatasetsV1AlphaRestTransport, + ], +) +def test_maps_platform_datasets_v1_alpha_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MapsPlatformDatasetsV1AlphaGrpcTransport, grpc_helpers), + ( + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_maps_platform_datasets_v1_alpha_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "mapsplatformdatasets.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="mapsplatformdatasets.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ], +) +def test_maps_platform_datasets_v1_alpha_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
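The client_cert_source_callback used throughout these mTLS tests stands in for a client certificate source: a zero-argument callable returning a (certificate_chain, private_key) pair of bytes, which the transport passes to grpc.ssl_channel_credentials. A hypothetical file-backed source with the same signature (the file names are invented for illustration):

    def my_client_cert_source():
        # Return (certificate_chain, private_key) as bytes.
        with open("client_cert.pem", "rb") as f:
            cert = f.read()
        with open("client_key.pem", "rb") as f:
            key = f.read()
        return cert, key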
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_maps_platform_datasets_v1_alpha_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MapsPlatformDatasetsV1AlphaRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_maps_platform_datasets_v1_alpha_host_no_port(transport_name): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="mapsplatformdatasets.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_maps_platform_datasets_v1_alpha_host_with_port(transport_name): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="mapsplatformdatasets.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "mapsplatformdatasets.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://mapsplatformdatasets.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_maps_platform_datasets_v1_alpha_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MapsPlatformDatasetsV1AlphaClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MapsPlatformDatasetsV1AlphaClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_dataset._session + session2 = client2.transport.create_dataset._session + assert session1 != session2 + session1 = client1.transport.update_dataset_metadata._session + session2 = client2.transport.update_dataset_metadata._session + assert session1 != session2 + session1 = client1.transport.get_dataset._session + session2 = client2.transport.get_dataset._session + assert session1 != session2 + session1 = client1.transport.list_dataset_versions._session + session2 = client2.transport.list_dataset_versions._session + assert session1 != session2 + session1 = client1.transport.list_datasets._session + session2 = client2.transport.list_datasets._session + assert session1 != session2 + session1 = client1.transport.delete_dataset._session + session2 = client2.transport.delete_dataset._session + assert session1 != session2 + session1 = client1.transport.delete_dataset_version._session + session2 = 
client2.transport.delete_dataset_version._session + assert session1 != session2 + + +def test_maps_platform_datasets_v1_alpha_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MapsPlatformDatasetsV1AlphaGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_maps_platform_datasets_v1_alpha_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ], +) +def test_maps_platform_datasets_v1_alpha_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ], +) +def test_maps_platform_datasets_v1_alpha_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_dataset_path(): + project = "squid" + dataset = "clam" + expected = "projects/{project}/datasets/{dataset}".format( + project=project, + dataset=dataset, + ) + actual = MapsPlatformDatasetsV1AlphaClient.dataset_path(project, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "whelk", + "dataset": "octopus", + } + path = MapsPlatformDatasetsV1AlphaClient.dataset_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsV1AlphaClient.parse_dataset_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MapsPlatformDatasetsV1AlphaClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MapsPlatformDatasetsV1AlphaClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsV1AlphaClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MapsPlatformDatasetsV1AlphaClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MapsPlatformDatasetsV1AlphaClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsV1AlphaClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MapsPlatformDatasetsV1AlphaClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MapsPlatformDatasetsV1AlphaClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MapsPlatformDatasetsV1AlphaClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = MapsPlatformDatasetsV1AlphaClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MapsPlatformDatasetsV1AlphaClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsV1AlphaClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MapsPlatformDatasetsV1AlphaClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MapsPlatformDatasetsV1AlphaClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MapsPlatformDatasetsV1AlphaClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaTransport, "_prep_wrapped_messages" + ) as prep: + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MapsPlatformDatasetsV1AlphaTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MapsPlatformDatasetsV1AlphaClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MapsPlatformDatasetsV1AlphaAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MapsPlatformDatasetsV1AlphaClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + MapsPlatformDatasetsV1AlphaClient, + transports.MapsPlatformDatasetsV1AlphaGrpcTransport, + ), + ( + MapsPlatformDatasetsV1AlphaAsyncClient, + transports.MapsPlatformDatasetsV1AlphaGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-maps-places/.OwlBot.yaml b/packages/google-maps-places/.OwlBot.yaml new file mode 100644 index 000000000000..53b1b3d93961 --- /dev/null +++ b/packages/google-maps-places/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/maps/places/(.*)/.*-py + dest: /owl-bot-staging/google-maps-places/$1 diff --git a/packages/google-maps-places/.coveragerc b/packages/google-maps-places/.coveragerc new file mode 100644 index 000000000000..85cb2db49afb --- /dev/null +++ b/packages/google-maps-places/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/maps/places/__init__.py + google/maps/places/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-maps-places/.flake8 b/packages/google-maps-places/.flake8 new file mode 100644 index 000000000000..2e438749863d --- /dev/null +++ b/packages/google-maps-places/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-maps-places/.gitignore b/packages/google-maps-places/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-maps-places/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-maps-places/.repo-metadata.json b/packages/google-maps-places/.repo-metadata.json new file mode 100644 index 000000000000..1fe63ae2bd89 --- /dev/null +++ b/packages/google-maps-places/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "places", + "name_pretty": "Places API", + "api_description": "The Places API allows developers to access a variety of search and retrieval endpoints for a Place.", + "product_documentation": "https://developers.google.com/maps/documentation/places/web-service/", + "client_documentation": "https://googleapis.dev/python/places/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-maps-places", + "api_id": "places.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "places" +} diff --git a/packages/google-maps-places/CHANGELOG.md b/packages/google-maps-places/CHANGELOG.md new file mode 100644 index 000000000000..fbadeb089200 --- /dev/null +++ b/packages/google-maps-places/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.0...google-maps-places-v0.1.1) (2023-06-03) + + +### Documentation + +* fix broken client library documentation links ([#11192](https://github.com/googleapis/google-cloud-python/issues/11192)) ([5e17f7a](https://github.com/googleapis/google-cloud-python/commit/5e17f7a901bbbae8ff9a44ed62f1abd2386da2c8)) + +## 0.1.0 (2023-05-09) + + +### Features + +* add initial files for google.maps.places.v1 ([#11154](https://github.com/googleapis/google-cloud-python/issues/11154)) ([81503bd](https://github.com/googleapis/google-cloud-python/commit/81503bda94fee7fe5c2fe27a13a478efb0591636)) + +## Changelog diff --git a/packages/google-maps-places/CODE_OF_CONDUCT.md b/packages/google-maps-places/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-maps-places/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for 
everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. 
+ +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-maps-places/CONTRIBUTING.rst b/packages/google-maps-places/CONTRIBUTING.rst new file mode 100644 index 000000000000..de6c347f0bf7 --- /dev/null +++ b/packages/google-maps-places/CONTRIBUTING.rst @@ -0,0 +1,281 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. 
_repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+    $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+    $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+    $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+    export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+    export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-<python_version> -- -k <name of test>
+
+
+  .. note::
+
+      System tests are only configured to run under Python.
+      For expediency, we do not run them in older versions of Python 3.
+
+      This alone will not run the tests. You'll need to change some local
+      auth settings and change some configuration in your project to
+      run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via:
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-maps-places
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py
+
+
+We also explicitly decided to support Python 3 beginning with version 3.7.
+Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+  works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
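Taken together, the contributing guide above amounts to a short local loop before sending a pull request. A minimal sketch, assuming ``nox`` is installed and you run it from this package's directory (the session names are the ones the guide references; the package's ``noxfile.py`` may define others):

.. code-block:: console

   $ cd packages/google-maps-places
   $ nox -s blacken     # auto-format with black
   $ nox -s lint        # flake8 / style checks
   $ nox -s unit-3.11   # unit tests on one interpreter
   $ nox -s cover       # verify 100% statement coverage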
diff --git a/packages/google-maps-places/LICENSE b/packages/google-maps-places/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-maps-places/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-maps-places/MANIFEST.in b/packages/google-maps-places/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/google-maps-places/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-maps-places/README.rst b/packages/google-maps-places/README.rst new file mode 100644 index 000000000000..805801a39231 --- /dev/null +++ b/packages/google-maps-places/README.rst @@ -0,0 +1,107 @@ +Python Client for Places API +============================ + +|preview| |pypi| |versions| + +`Places API`_: The Places API allows developers to access a variety of search and retrieval endpoints for a Place. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-maps-places.svg + :target: https://pypi.org/project/google-maps-places/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-maps-places.svg + :target: https://pypi.org/project/google-maps-places/ +.. _Places API: https://developers.google.com/maps/documentation/places/web-service/ +.. _Client Library Documentation: https://googleapis.dev/python/places/latest +.. _Product Documentation: https://developers.google.com/maps/documentation/places/web-service/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Places API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Places API.: https://developers.google.com/maps/documentation/places/web-service/ +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. 
The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/` folder.
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-maps-places
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-maps-places
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Places API
+  to see other available methods on the client.
+- Read the `Places API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Places API Product documentation: https://developers.google.com/maps/documentation/places/web-service/
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-maps-places/SECURITY.md b/packages/google-maps-places/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-maps-places/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
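To complement the README's installation steps, here is a minimal usage sketch. It mirrors the generated ``search_text`` sample that appears later in this diff, but uses the synchronous ``PlacesClient``; the query string is a placeholder, and credentials are assumed to be resolved from the environment (see the Setup Authentication link above):

.. code-block:: python

   from google.maps import places_v1

   # Instantiate the synchronous client; credentials come from the
   # environment if none are passed explicitly.
   client = places_v1.PlacesClient()

   # Build the request and issue a text-query place search.
   request = places_v1.SearchTextRequest(text_query="spicy vegetarian food")
   response = client.search_text(request=request)

   # Handle the response.
   print(response)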
diff --git a/packages/google-maps-places/docs/CHANGELOG.md b/packages/google-maps-places/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-maps-places/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-maps-places/docs/README.rst b/packages/google-maps-places/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-maps-places/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-maps-places/docs/_static/custom.css b/packages/google-maps-places/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-maps-places/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-maps-places/docs/_templates/layout.html b/packages/google-maps-places/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-maps-places/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+        {%- if render_sidebar %}
+          <div class="bodywrapper">
+        {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+        {%- if render_sidebar %}
+          </div>
+        {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-maps-places/docs/conf.py b/packages/google-maps-places/docs/conf.py new file mode 100644 index 000000000000..793037b776f6 --- /dev/null +++ b/packages/google-maps-places/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-maps-places documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-maps-places" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-maps-places", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-maps-places-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-maps-places.tex", + "google-maps-places Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. 
+# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-maps-places", + "google-maps-places Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-maps-places", + "google-maps-places Documentation", + author, + "google-maps-places", + "google-maps-places Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-maps-places/docs/index.rst b/packages/google-maps-places/docs/index.rst new file mode 100644 index 000000000000..a914e26adc4c --- /dev/null +++ b/packages/google-maps-places/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + places_v1/services + places_v1/types + + +Changelog +--------- + +For a list of all ``google-maps-places`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-maps-places/docs/multiprocessing.rst b/packages/google-maps-places/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-maps-places/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. 
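As a concrete illustration of the ``multiprocessing.rst`` note above, the sketch below creates the gRPC-backed client inside each worker process, i.e. after :func:`os.fork` has happened, rather than sharing one instance across processes. The worker function, the queries, and the ``response.places`` field access are illustrative assumptions, not part of the generated library files in this diff:

.. code-block:: python

   import multiprocessing

   from google.maps import places_v1

   def count_results(text_query: str) -> int:
       # Create the client *inside* the child process, after the fork,
       # per the guidance in multiprocessing.rst.
       client = places_v1.PlacesClient()
       response = client.search_text(
           request=places_v1.SearchTextRequest(text_query=text_query)
       )
       return len(response.places)

   if __name__ == "__main__":
       with multiprocessing.Pool(processes=2) as pool:
           print(pool.map(count_results, ["coffee in Kyoto", "books in Zurich"]))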
diff --git a/packages/google-maps-places/docs/places_v1/places.rst b/packages/google-maps-places/docs/places_v1/places.rst new file mode 100644 index 000000000000..cf160d2126f2 --- /dev/null +++ b/packages/google-maps-places/docs/places_v1/places.rst @@ -0,0 +1,6 @@ +Places +------------------------ + +.. automodule:: google.maps.places_v1.services.places + :members: + :inherited-members: diff --git a/packages/google-maps-places/docs/places_v1/services.rst b/packages/google-maps-places/docs/places_v1/services.rst new file mode 100644 index 000000000000..91337ff78a9a --- /dev/null +++ b/packages/google-maps-places/docs/places_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Maps Places v1 API +====================================== +.. toctree:: + :maxdepth: 2 + + places diff --git a/packages/google-maps-places/docs/places_v1/types.rst b/packages/google-maps-places/docs/places_v1/types.rst new file mode 100644 index 000000000000..d634d52d3f87 --- /dev/null +++ b/packages/google-maps-places/docs/places_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Maps Places v1 API +=================================== + +.. automodule:: google.maps.places_v1.types + :members: + :show-inheritance: diff --git a/packages/google-maps-places/google/maps/places/__init__.py b/packages/google-maps-places/google/maps/places/__init__.py new file mode 100644 index 000000000000..535f07ecb57a --- /dev/null +++ b/packages/google-maps-places/google/maps/places/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.maps.places import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.maps.places_v1.services.places.async_client import PlacesAsyncClient +from google.maps.places_v1.services.places.client import PlacesClient +from google.maps.places_v1.types.geometry import Circle +from google.maps.places_v1.types.place import Place, PriceLevel +from google.maps.places_v1.types.places_service import ( + Int32Range, + SearchTextRequest, + SearchTextResponse, +) + +__all__ = ( + "PlacesClient", + "PlacesAsyncClient", + "Circle", + "Place", + "PriceLevel", + "Int32Range", + "SearchTextRequest", + "SearchTextResponse", +) diff --git a/packages/google-maps-places/google/maps/places/gapic_version.py b/packages/google-maps-places/google/maps/places/gapic_version.py new file mode 100644 index 000000000000..7e609eff7b01 --- /dev/null +++ b/packages/google-maps-places/google/maps/places/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places/py.typed b/packages/google-maps-places/google/maps/places/py.typed new file mode 100644 index 000000000000..ae821546ded2 --- /dev/null +++ b/packages/google-maps-places/google/maps/places/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-places package uses inline types. diff --git a/packages/google-maps-places/google/maps/places_v1/__init__.py b/packages/google-maps-places/google/maps/places_v1/__init__.py new file mode 100644 index 000000000000..975a2146a26f --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.maps.places_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.places import PlacesAsyncClient, PlacesClient +from .types.geometry import Circle +from .types.place import Place, PriceLevel +from .types.places_service import Int32Range, SearchTextRequest, SearchTextResponse + +__all__ = ( + "PlacesAsyncClient", + "Circle", + "Int32Range", + "Place", + "PlacesClient", + "PriceLevel", + "SearchTextRequest", + "SearchTextResponse", +) diff --git a/packages/google-maps-places/google/maps/places_v1/gapic_metadata.json b/packages/google-maps-places/google/maps/places_v1/gapic_metadata.json new file mode 100644 index 000000000000..71bc26dea4ef --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.maps.places_v1", + "protoPackage": "google.maps.places.v1", + "schema": "1.0", + "services": { + "Places": { + "clients": { + "grpc": { + "libraryClient": "PlacesClient", + "rpcs": { + "SearchText": { + "methods": [ + "search_text" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PlacesAsyncClient", + "rpcs": { + "SearchText": { + "methods": [ + "search_text" + ] + } + } + }, + "rest": { + "libraryClient": "PlacesClient", + "rpcs": { + "SearchText": { + "methods": [ + "search_text" + ] + } + } + } + } + } + } +} diff --git a/packages/google-maps-places/google/maps/places_v1/gapic_version.py b/packages/google-maps-places/google/maps/places_v1/gapic_version.py new file mode 100644 index 000000000000..7e609eff7b01 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 
-*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places_v1/py.typed b/packages/google-maps-places/google/maps/places_v1/py.typed new file mode 100644 index 000000000000..ae821546ded2 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-maps-places package uses inline types. diff --git a/packages/google-maps-places/google/maps/places_v1/services/__init__.py b/packages/google-maps-places/google/maps/places_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/__init__.py b/packages/google-maps-places/google/maps/places_v1/services/places/__init__.py new file mode 100644 index 000000000000..e1dfdc8a2384 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import PlacesAsyncClient +from .client import PlacesClient + +__all__ = ( + "PlacesClient", + "PlacesAsyncClient", +) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py new file mode 100644 index 000000000000..f5f7e5b9866a --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.places_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.maps.places_v1.types import place, places_service + +from .client import PlacesClient +from .transports.base import DEFAULT_CLIENT_INFO, PlacesTransport +from .transports.grpc_asyncio import PlacesGrpcAsyncIOTransport + + +class PlacesAsyncClient: + """Service definition for the Places API.""" + + _client: PlacesClient + + DEFAULT_ENDPOINT = PlacesClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PlacesClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(PlacesClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + PlacesClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PlacesClient.common_folder_path) + parse_common_folder_path = staticmethod(PlacesClient.parse_common_folder_path) + common_organization_path = staticmethod(PlacesClient.common_organization_path) + parse_common_organization_path = staticmethod( + PlacesClient.parse_common_organization_path + ) + common_project_path = staticmethod(PlacesClient.common_project_path) + parse_common_project_path = staticmethod(PlacesClient.parse_common_project_path) + common_location_path = staticmethod(PlacesClient.common_location_path) + parse_common_location_path = staticmethod(PlacesClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+
+        Returns:
+            PlacesAsyncClient: The constructed client.
+        """
+        return PlacesClient.from_service_account_info.__func__(PlacesAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PlacesAsyncClient: The constructed client.
+        """
+        return PlacesClient.from_service_account_file.__func__(PlacesAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return PlacesClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> PlacesTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PlacesTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(PlacesClient).get_transport_class, type(PlacesClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, PlacesTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the places client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.PlacesTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PlacesClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def search_text( + self, + request: Optional[Union[places_service.SearchTextRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> places_service.SearchTextResponse: + r"""Text query based place search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import places_v1 + + async def sample_search_text(): + # Create a client + client = places_v1.PlacesAsyncClient() + + # Initialize request argument(s) + request = places_v1.SearchTextRequest( + text_query="text_query_value", + ) + + # Make the request + response = await client.search_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.maps.places_v1.types.SearchTextRequest, dict]]): + The request object. Request data structure for + SearchText. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.places_v1.types.SearchTextResponse: + Response proto for SearchText. + """ + # Create or coerce a protobuf request object. + request = places_service.SearchTextRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
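The generated sample in the docstring above defines the coroutine but never drives it; in application code it has to be scheduled on an event loop. A minimal sketch, assuming Application Default Credentials are available in the environment (the query string is illustrative only):

.. code-block:: python

    import asyncio

    from google.maps import places_v1


    async def sample_search_text():
        # The async client supports "async with" and closes its transport on exit.
        async with places_v1.PlacesAsyncClient() as client:
            request = places_v1.SearchTextRequest(text_query="spicy vegetarian food")
            response = await client.search_text(request=request)
            print(response)


    asyncio.run(sample_search_text())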
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PlacesAsyncClient",) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/client.py b/packages/google-maps-places/google/maps/places_v1/services/places/client.py new file mode 100644 index 000000000000..caf71a395f96 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/client.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.places_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.maps.places_v1.types import place, places_service + +from .transports.base import DEFAULT_CLIENT_INFO, PlacesTransport +from .transports.grpc import PlacesGrpcTransport +from .transports.grpc_asyncio import PlacesGrpcAsyncIOTransport +from .transports.rest import PlacesRestTransport + + +class PlacesClientMeta(type): + """Metaclass for the Places client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[PlacesTransport]] + _transport_registry["grpc"] = PlacesGrpcTransport + _transport_registry["grpc_asyncio"] = PlacesGrpcAsyncIOTransport + _transport_registry["rest"] = PlacesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PlacesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
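A note on the metaclass registry above: ``_transport_registry`` is an ``OrderedDict``, so when no label is passed the first entry, gRPC, is selected, and the registry keys are exactly the strings the client constructor accepts for ``transport``. A small sketch, assuming Application Default Credentials:

.. code-block:: python

    from google.maps import places_v1

    # Default: the first registry entry, i.e. gRPC.
    grpc_client = places_v1.PlacesClient()
    assert grpc_client.transport.kind == "grpc"

    # Opt into the REST (HTTP/1.1 + JSON) transport instead.
    rest_client = places_v1.PlacesClient(transport="rest")
    assert rest_client.transport.kind == "rest"

Both clients expose the same ``search_text`` surface; only the wire protocol differs.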
+        return next(iter(cls._transport_registry.values()))
+
+
+class PlacesClient(metaclass=PlacesClientMeta):
+    """Service definition for the Places API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "places.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PlacesClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PlacesClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PlacesTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PlacesTransport: The transport used by the client
+                instance.
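To pin down what the endpoint-conversion helper above does, here are the mappings its regex implies. ``_get_default_mtls_endpoint`` is private; the direct calls are shown only for illustration:

.. code-block:: python

    from google.maps import places_v1

    convert = places_v1.PlacesClient._get_default_mtls_endpoint

    assert convert("places.googleapis.com") == "places.mtls.googleapis.com"
    assert (
        convert("places.sandbox.googleapis.com")
        == "places.mtls.sandbox.googleapis.com"
    )
    # Already-mTLS hosts and non-googleapis.com hosts pass through unchanged.
    assert convert("places.mtls.googleapis.com") == "places.mtls.googleapis.com"
    assert convert("example.com") == "example.com"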
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PlacesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the places client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PlacesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
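The resolution order implemented above can be checked directly. A sketch, assuming ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` is unset (it defaults to ``"false"``):

.. code-block:: python

    import os

    from google.maps import places_v1

    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    endpoint, cert_source = places_v1.PlacesClient.get_mtls_endpoint_and_cert_source()
    assert endpoint == places_v1.PlacesClient.DEFAULT_MTLS_ENDPOINT
    assert cert_source is None  # no client certificate when the cert env var is "false"

    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    endpoint, _ = places_v1.PlacesClient.get_mtls_endpoint_and_cert_source()
    assert endpoint == places_v1.PlacesClient.DEFAULT_ENDPOINT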
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PlacesTransport): + # transport is a PlacesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def search_text( + self, + request: Optional[Union[places_service.SearchTextRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> places_service.SearchTextResponse: + r"""Text query based place search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.maps import places_v1 + + def sample_search_text(): + # Create a client + client = places_v1.PlacesClient() + + # Initialize request argument(s) + request = places_v1.SearchTextRequest( + text_query="text_query_value", + ) + + # Make the request + response = client.search_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.maps.places_v1.types.SearchTextRequest, dict]): + The request object. Request data structure for + SearchText. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.maps.places_v1.types.SearchTextResponse: + Response proto for SearchText. 
+ """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a places_service.SearchTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, places_service.SearchTextRequest): + request = places_service.SearchTextRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PlacesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PlacesClient",) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/__init__.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/__init__.py new file mode 100644 index 000000000000..2ce60c83f644 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PlacesTransport +from .grpc import PlacesGrpcTransport +from .grpc_asyncio import PlacesGrpcAsyncIOTransport +from .rest import PlacesRestInterceptor, PlacesRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PlacesTransport]] +_transport_registry["grpc"] = PlacesGrpcTransport +_transport_registry["grpc_asyncio"] = PlacesGrpcAsyncIOTransport +_transport_registry["rest"] = PlacesRestTransport + +__all__ = ( + "PlacesTransport", + "PlacesGrpcTransport", + "PlacesGrpcAsyncIOTransport", + "PlacesRestTransport", + "PlacesRestInterceptor", +) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/base.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/base.py new file mode 100644 index 000000000000..cba5452fe626 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/base.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.maps.places_v1 import gapic_version as package_version +from google.maps.places_v1.types import places_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PlacesTransport(abc.ABC): + """Abstract transport class for Places.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "places.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.search_text: gapic_v1.method.wrap_method( + self.search_text, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def search_text( + self, + ) -> Callable[ + [places_service.SearchTextRequest], + Union[ + places_service.SearchTextResponse, + Awaitable[places_service.SearchTextResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PlacesTransport",) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc.py new file mode 100644 index 000000000000..1db5e72f26f0 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc.py @@ -0,0 +1,267 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.maps.places_v1.types import places_service + +from .base import DEFAULT_CLIENT_INFO, PlacesTransport + + +class PlacesGrpcTransport(PlacesTransport): + """gRPC backend transport for Places. + + Service definition for the Places API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
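A consequence of the base transport's credential handling above: explicit ``credentials`` and a ``credentials_file`` are mutually exclusive, and when neither is given the transport falls back to ``google.auth.default()``. A sketch of the failure mode (``key.json`` is a placeholder path):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.maps import places_v1
    from google.oauth2 import service_account

    creds = service_account.Credentials.from_service_account_file("key.json")
    try:
        places_v1.PlacesClient(
            credentials=creds,
            client_options={"credentials_file": "key.json"},
        )
    except core_exceptions.DuplicateCredentialArgs:
        print("pass credentials or credentials_file, not both")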
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "places.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "places.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def search_text( + self, + ) -> Callable[ + [places_service.SearchTextRequest], places_service.SearchTextResponse + ]: + r"""Return a callable for the search text method over gRPC. + + Text query based place search. + + Returns: + Callable[[~.SearchTextRequest], + ~.SearchTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_text" not in self._stubs: + self._stubs["search_text"] = self.grpc_channel.unary_unary( + "/google.maps.places.v1.Places/SearchText", + request_serializer=places_service.SearchTextRequest.serialize, + response_deserializer=places_service.SearchTextResponse.deserialize, + ) + return self._stubs["search_text"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PlacesGrpcTransport",) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc_asyncio.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc_asyncio.py new file mode 100644 index 000000000000..5fd042ae2194 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc_asyncio.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.maps.places_v1.types import places_service + +from .base import DEFAULT_CLIENT_INFO, PlacesTransport +from .grpc import PlacesGrpcTransport + + +class PlacesGrpcAsyncIOTransport(PlacesTransport): + """gRPC AsyncIO backend transport for Places. + + Service definition for the Places API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "places.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "places.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def search_text( + self, + ) -> Callable[ + [places_service.SearchTextRequest], Awaitable[places_service.SearchTextResponse] + ]: + r"""Return a callable for the search text method over gRPC. + + Text query based place search. + + Returns: + Callable[[~.SearchTextRequest], + Awaitable[~.SearchTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_text" not in self._stubs: + self._stubs["search_text"] = self.grpc_channel.unary_unary( + "/google.maps.places.v1.Places/SearchText", + request_serializer=places_service.SearchTextRequest.serialize, + response_deserializer=places_service.SearchTextResponse.deserialize, + ) + return self._stubs["search_text"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("PlacesGrpcAsyncIOTransport",) diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/rest.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/rest.py new file mode 100644 index 000000000000..df24dc70c647 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/rest.py @@ -0,0 +1,314 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.maps.places_v1.types import places_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PlacesTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PlacesRestInterceptor: + """Interceptor for Places. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PlacesRestTransport. + + .. code-block:: python + class MyCustomPlacesInterceptor(PlacesRestInterceptor): + def pre_search_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PlacesRestTransport(interceptor=MyCustomPlacesInterceptor()) + client = PlacesClient(transport=transport) + + + """ + + def pre_search_text( + self, + request: places_service.SearchTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[places_service.SearchTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the Places server. + """ + return request, metadata + + def post_search_text( + self, response: places_service.SearchTextResponse + ) -> places_service.SearchTextResponse: + """Post-rpc interceptor for search_text + + Override in a subclass to manipulate the response + after it is returned by the Places server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PlacesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PlacesRestInterceptor + + +class PlacesRestTransport(PlacesTransport): + """REST backend transport for Places. + + Service definition for the Places API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "places.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PlacesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or PlacesRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _SearchText(PlacesRestStub):
+        def __hash__(self):
+            return hash("SearchText")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: places_service.SearchTextRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> places_service.SearchTextResponse:
+            r"""Call the search text method over HTTP.
+
+            Args:
+                request (~.places_service.SearchTextRequest):
+                    The request object. Request data structure for
+                    SearchText.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.places_service.SearchTextResponse:
+                    Response proto for SearchText.
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/Text:search",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_search_text(request, metadata)
+            pb_request = places_service.SearchTextRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
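Given the ``http_options`` and query-parameter handling above, a ``SearchText`` call over the REST transport reduces to a single JSON POST. The same request expressed with the plain ``requests`` library, for illustration only (authentication is omitted; the transport's ``AuthorizedSession`` normally supplies it, and proto fields are serialized in camelCase):

.. code-block:: python

    import requests

    response = requests.post(
        "https://places.googleapis.com/v1/Text:search",
        params={"$alt": "json;enum-encoding=int"},
        headers={"Content-Type": "application/json"},
        json={"textQuery": "spicy vegetarian food"},
    )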
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = places_service.SearchTextResponse() + pb_resp = places_service.SearchTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_text(resp) + return resp + + @property + def search_text( + self, + ) -> Callable[ + [places_service.SearchTextRequest], places_service.SearchTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PlacesRestTransport",) diff --git a/packages/google-maps-places/google/maps/places_v1/types/__init__.py b/packages/google-maps-places/google/maps/places_v1/types/__init__.py new file mode 100644 index 000000000000..d2cc83a29d31 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .geometry import Circle +from .place import Place, PriceLevel +from .places_service import Int32Range, SearchTextRequest, SearchTextResponse + +__all__ = ( + "Circle", + "Place", + "PriceLevel", + "Int32Range", + "SearchTextRequest", + "SearchTextResponse", +) diff --git a/packages/google-maps-places/google/maps/places_v1/types/geometry.py b/packages/google-maps-places/google/maps/places_v1/types/geometry.py new file mode 100644 index 000000000000..07940a4d158e --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/geometry.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.type import latlng_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "Circle", + }, +) + + +class Circle(proto.Message): + r"""Circle with a LatLng as center and radius. + + Attributes: + center (google.type.latlng_pb2.LatLng): + Required. Center latitude and longitude. + + The range of latitude must be within ``[-90.0, 90.0]``. The + range of the longitude must be within ``[-180.0, 180.0]``. 
+ radius (float): + Required. Radius measured in meters. The radius must be + within ``[0.0, 50000.0]``. + """ + + center: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=1, + message=latlng_pb2.LatLng, + ) + radius: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/place.py b/packages/google-maps-places/google/maps/places_v1/types/place.py new file mode 100644 index 000000000000..468c856facf3 --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/place.py @@ -0,0 +1,831 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.geo.type.types import viewport as ggt_viewport +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore +from google.type import localized_text_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "PriceLevel", + "Place", + }, +) + + +class PriceLevel(proto.Enum): + r"""Price level of the place. + + Values: + PRICE_LEVEL_UNSPECIFIED (0): + Place price level is unspecified or unknown. + FREE (1): + No description available. + INEXPENSIVE (2): + Place provides inexpensive services. + MODERATE (3): + Place provides moderately priced services. + EXPENSIVE (4): + Place provides expensive services. + VERY_EXPENSIVE (5): + Place provides very expensive services. + """ + PRICE_LEVEL_UNSPECIFIED = 0 + FREE = 1 + INEXPENSIVE = 2 + MODERATE = 3 + EXPENSIVE = 4 + VERY_EXPENSIVE = 5 + + +class Place(proto.Message): + r"""All the information representing a Place. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Required. The unique identifier of a place. + display_name (google.type.localized_text_pb2.LocalizedText): + The localized name of the place, suitable as + a short human-readable description. For example, + "Google Sydney", "Starbucks", "Pyrmont", etc. + types (MutableSequence[str]): + A set of type tags for this result. For + example, "political" and "locality". + national_phone_number (str): + A human-readable phone number for the place, + in national format. + international_phone_number (str): + A human-readable phone number for the place, + in international format. + formatted_address (str): + A full, human-readable address for this + place. + address_components (MutableSequence[google.maps.places_v1.types.Place.AddressComponent]): + Repeated components for each locality level. + plus_code (google.maps.places_v1.types.Place.PlusCode): + Plus code of the place location lat/long. + location (google.type.latlng_pb2.LatLng): + The position of this place. 
+        viewport (google.geo.type.types.Viewport):
+            A viewport suitable for displaying the place
+            on an average-sized map.
+        rating (float):
+            A rating between 1.0 and 5.0, based on user
+            reviews of this place.
+        google_maps_uri (str):
+            A URL providing more information about this
+            place.
+        website_uri (str):
+            The authoritative website for this place,
+            e.g. a business' homepage. Note that for places
+            that are part of a chain (e.g. an IKEA store),
+            this will usually be the website for the
+            individual store, not the overall chain.
+        reviews (MutableSequence[google.maps.places_v1.types.Place.Review]):
+            List of reviews about this place.
+        opening_hours (google.maps.places_v1.types.Place.OpeningHours):
+            The regular hours of operation.
+        utc_offset_minutes (int):
+            Number of minutes this place's timezone is
+            currently offset from UTC. This is expressed in
+            minutes to support timezones that are offset by
+            fractions of an hour, e.g. X hours and 15
+            minutes.
+        adr_format_address (str):
+            The place's address in adr microformat:
+            http://microformats.org/wiki/adr.
+        business_status (google.maps.places_v1.types.Place.BusinessStatus):
+            The business status for the place.
+        price_level (google.maps.places_v1.types.PriceLevel):
+            Price level of the place.
+        attributions (MutableSequence[google.maps.places_v1.types.Place.Attribution]):
+            A set of data providers that must be shown
+            with this result.
+        user_rating_count (int):
+            The total number of reviews (with or without
+            text) for this place.
+        icon_mask_base_uri (str):
+            A truncated URL to a v2 icon mask. Users can
+            access a different icon type by appending a type
+            suffix to the end (e.g., ".svg" or ".png").
+        icon_background_color (str):
+            Background color for icon_mask in hex format, e.g. #909CE1.
+        takeout (bool):
+            Specifies if the business supports takeout.
+
+            This field is a member of `oneof`_ ``_takeout``.
+        delivery (bool):
+            Specifies if the business supports delivery.
+
+            This field is a member of `oneof`_ ``_delivery``.
+        dine_in (bool):
+            Specifies if the business supports indoor or
+            outdoor seating options.
+
+            This field is a member of `oneof`_ ``_dine_in``.
+        curbside_pickup (bool):
+            Specifies if the business supports curbside
+            pickup.
+
+            This field is a member of `oneof`_ ``_curbside_pickup``.
+        wheelchair_accessible_entrance (bool):
+            Specifies if the place has an entrance that
+            is wheelchair-accessible.
+
+            This field is a member of `oneof`_ ``_wheelchair_accessible_entrance``.
+        reservable (bool):
+            Specifies if the place supports reservations.
+
+            This field is a member of `oneof`_ ``_reservable``.
+        serves_breakfast (bool):
+            Specifies if the place serves breakfast.
+
+            This field is a member of `oneof`_ ``_serves_breakfast``.
+        serves_lunch (bool):
+            Specifies if the place serves lunch.
+
+            This field is a member of `oneof`_ ``_serves_lunch``.
+        serves_dinner (bool):
+            Specifies if the place serves dinner.
+
+            This field is a member of `oneof`_ ``_serves_dinner``.
+        serves_beer (bool):
+            Specifies if the place serves beer.
+
+            This field is a member of `oneof`_ ``_serves_beer``.
+        serves_wine (bool):
+            Specifies if the place serves wine.
+
+            This field is a member of `oneof`_ ``_serves_wine``.
+        serves_brunch (bool):
+            Specifies if the place serves brunch.
+
+            This field is a member of `oneof`_ ``_serves_brunch``.
+        serves_vegetarian_food (bool):
+            Specifies if the place serves vegetarian
+            food.
+
+            This field is a member of `oneof`_ ``_serves_vegetarian_food``.
+        current_opening_hours (google.maps.places_v1.types.Place.OpeningHours):
+            The hours of operation for the next seven days (including
+            today). The time period starts at midnight on the date of
+            the request and ends at 11:59 pm six days later. This field
+            includes the special_days subfield of all hours, set for
+            dates that have exceptional hours.
+        current_secondary_opening_hours (MutableSequence[google.maps.places_v1.types.Place.OpeningHours]):
+            Contains an array of entries for the next seven days
+            including information about secondary hours of a business.
+            Secondary hours are different from a business's main hours.
+            For example, a restaurant can specify drive-through hours or
+            delivery hours as its secondary hours. This field populates
+            the type subfield, which draws from a predefined list of
+            opening hours types (such as DRIVE_THROUGH, PICKUP, or
+            TAKEOUT) based on the types of the place. This field
+            includes the special_days subfield of all hours, set for
+            dates that have exceptional hours.
+        secondary_opening_hours (MutableSequence[google.maps.places_v1.types.Place.OpeningHours]):
+            Contains an array of entries for information about regular
+            secondary hours of a business. Secondary hours are different
+            from a business's main hours. For example, a restaurant can
+            specify drive-through hours or delivery hours as its
+            secondary hours. This field populates the type subfield,
+            which draws from a predefined list of opening hours types
+            (such as DRIVE_THROUGH, PICKUP, or TAKEOUT) based on the
+            types of the place.
+        editorial_summary (google.maps.places_v1.types.Place.EditorialSummary):
+            Contains a summary of the place. A summary is
+            composed of a textual overview and, if
+            applicable, the language code for it. Summary
+            text must be presented as-is and cannot be
+            modified or altered.
+    """
+
+    class BusinessStatus(proto.Enum):
+        r"""Business status for the place.
+
+        Values:
+            BUSINESS_STATUS_UNSPECIFIED (0):
+                Default value. This value is unused.
+            OPERATIONAL (1):
+                The establishment is operational, not
+                necessarily open now.
+            CLOSED_TEMPORARILY (2):
+                The establishment is temporarily closed.
+            CLOSED_PERMANENTLY (3):
+                The establishment is permanently closed.
+        """
+        BUSINESS_STATUS_UNSPECIFIED = 0
+        OPERATIONAL = 1
+        CLOSED_TEMPORARILY = 2
+        CLOSED_PERMANENTLY = 3
+
+    class AddressComponent(proto.Message):
+        r"""The structured components that form the formatted address, if
+        this information is available.
+
+        Attributes:
+            long_text (str):
+                The full text description or name of the address component.
+                For example, an address component for the country Australia
+                may have a long_text of "Australia".
+            short_text (str):
+                An abbreviated textual name for the address component, if
+                available. For example, an address component for the country
+                of Australia may have a short_text of "AU".
+            types (MutableSequence[str]):
+                An array indicating the type(s) of the
+                address component.
+            language_code (str):
+                The language used to format these components,
+                in CLDR notation.
+ """ + + long_text: str = proto.Field( + proto.STRING, + number=1, + ) + short_text: str = proto.Field( + proto.STRING, + number=2, + ) + types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + language_code: str = proto.Field( + proto.STRING, + number=4, + ) + + class PlusCode(proto.Message): + r"""Plus code (http://plus.codes) is a location reference with + two formats: global code defining a 14mx14m (1/8000th of a + degree) or smaller rectangle, and compound code, replacing the + prefix with a reference location. + + Attributes: + global_code (str): + Place's global (full) code, such as ``9FWM33GV+HQ``, + representing an 1/8000 by 1/8000 degree area (~14 by 14 + meters). + compound_code (str): + Place's compound code, such as ``33GV+HQ, Ramberg, Norway``, + containing the suffix of the global code and replacing the + prefix with a formatted name of a reference entity. + """ + + global_code: str = proto.Field( + proto.STRING, + number=1, + ) + compound_code: str = proto.Field( + proto.STRING, + number=2, + ) + + class Review(proto.Message): + r"""Information about a review of the place. + + Attributes: + publish_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp for the review, expressed in + seconds since epoch. + relative_publish_time_description (str): + A string of formatted recent time, expressing + the review time relative to the current time in + a form appropriate for the language and country. + text (google.type.localized_text_pb2.LocalizedText): + The localized text of the review. + author (str): + The name of the review author. + author_uri (str): + A link to the review author's profile. + author_photo_uri (str): + The author's profile photo. + rating (float): + A whole number between 1.0 and 5.0, a.k.a. + the number of stars. + original_language_code (str): + A BCP-47 language code indicating the original language of + the review. If the review has been translated, then + original_language != language. This field contains the main + language tag only, and not the secondary tag indicating + country or region. For example, all the English reviews are + tagged as 'en', and not 'en-AU' or 'en-UK' and so on.This + field is empty if there is only a rating with no review + text. + translated (bool): + A boolean value indicating if the review was + translated from the original language it was + written in. If a review has been translated, + corresponding to a value of true, Google + recommends that you indicate this to your users. + For example, you can add the following string, + “Translated by Google”, to the review. + """ + + publish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + relative_publish_time_description: str = proto.Field( + proto.STRING, + number=2, + ) + text: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=9, + message=localized_text_pb2.LocalizedText, + ) + author: str = proto.Field( + proto.STRING, + number=4, + ) + author_uri: str = proto.Field( + proto.STRING, + number=5, + ) + author_photo_uri: str = proto.Field( + proto.STRING, + number=6, + ) + rating: float = proto.Field( + proto.DOUBLE, + number=7, + ) + original_language_code: str = proto.Field( + proto.STRING, + number=10, + ) + translated: bool = proto.Field( + proto.BOOL, + number=11, + ) + + class OpeningHours(proto.Message): + r"""Information about business hour of the place. + + Attributes: + open_now (bool): + Is this place open right now? 
Always present + unless we lack time-of-day or timezone data for + these opening hours. + periods (MutableSequence[google.maps.places_v1.types.Place.OpeningHours.OpeningHoursPeriod]): + The periods that this place is open during + the week. The periods are in chronological + order, starting with Sunday in the place-local + timezone. An empty (but not absent) value + indicates a place that is never open, e.g. + because it is closed temporarily for + renovations. + weekday_descriptions (MutableSequence[str]): + Localized strings describing the opening + hours of this place, one string for each day of + the week. Will be empty if the hours are + unknown or could not be converted to localized + text. Example: "Sun: 18:00–06:00". + secondary_hour_type (google.maps.places_v1.types.Place.OpeningHours.SecondaryHourType): + A type string used to identify the type of + secondary hours. + special_days (MutableSequence[google.maps.places_v1.types.Place.OpeningHours.SpecialDay]): + Structured information for special days that fall within the + period that the returned opening hours cover. Special days + are days that could impact the business hours of a place, + e.g. Christmas day. Set for current_opening_hours and + current_secondary_opening_hours if there are exceptional + hours. + """ + + class SecondaryHourType(proto.Enum): + r"""A type used to identify the type of secondary hours. + + Values: + SECONDARY_HOUR_TYPE_UNSPECIFIED (0): + Default value when secondary hour type is not + specified. + DRIVE_THROUGH (1): + The drive-through hour for banks, + restaurants, or pharmacies. + HAPPY_HOUR (2): + The happy hour. + DELIVERY (3): + The delivery hour. + TAKEOUT (4): + The takeout hour. + KITCHEN (5): + The kitchen hour. + BREAKFAST (6): + The breakfast hour. + LUNCH (7): + The lunch hour. + DINNER (8): + The dinner hour. + BRUNCH (9): + The brunch hour. + PICKUP (10): + The pickup hour. + ACCESS (11): + The access hours for storage places. + SENIOR_HOURS (12): + The special hours for seniors. + ONLINE_SERVICE_HOURS (13): + The online service hours. + """ + SECONDARY_HOUR_TYPE_UNSPECIFIED = 0 + DRIVE_THROUGH = 1 + HAPPY_HOUR = 2 + DELIVERY = 3 + TAKEOUT = 4 + KITCHEN = 5 + BREAKFAST = 6 + LUNCH = 7 + DINNER = 8 + BRUNCH = 9 + PICKUP = 10 + ACCESS = 11 + SENIOR_HOURS = 12 + ONLINE_SERVICE_HOURS = 13 + + class OpeningHoursPeriod(proto.Message): + r"""A period the place remains in open_now status. + + Attributes: + open_ (google.maps.places_v1.types.Place.OpeningHours.OpeningHoursPeriod.OpeningHoursPoint): + The time that the place starts to be open. + close (google.maps.places_v1.types.Place.OpeningHours.OpeningHoursPeriod.OpeningHoursPoint): + The time that the place starts to be closed. + """ + + class OpeningHoursPoint(proto.Message): + r"""Status changing points. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + day (int): + A day of the week, as an integer in the range + 0-6. 0 is Sunday, 1 is Monday, etc. + + This field is a member of `oneof`_ ``_day``. + hour (int): + The hour in 2 digits. Ranges from 00 to 23. + + This field is a member of `oneof`_ ``_hour``. + minute (int): + The minute in 2 digits. Ranges from 00 to 59. + + This field is a member of `oneof`_ ``_minute``. + date_deprecated (str): + Date of the endpoint expressed in ``RFC3339`` format in the + local timezone for the place. For example 2010-12-31. + date (google.type.date_pb2.Date): + Date in the local timezone for the place. 
+ truncated (bool): + Whether or not this endpoint was truncated. Truncation + occurs when the real hours are outside the times we are + willing to return hours between, so we truncate the hours + back to these boundaries. This ensures that at most + ``24 * 7`` hours from midnight of the day of the request are + returned. + """ + + day: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + hour: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + minute: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + date_deprecated: str = proto.Field( + proto.STRING, + number=4, + ) + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=6, + message=date_pb2.Date, + ) + truncated: bool = proto.Field( + proto.BOOL, + number=5, + ) + + open_: "Place.OpeningHours.OpeningHoursPeriod.OpeningHoursPoint" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="Place.OpeningHours.OpeningHoursPeriod.OpeningHoursPoint", + ) + ) + close: "Place.OpeningHours.OpeningHoursPeriod.OpeningHoursPoint" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="Place.OpeningHours.OpeningHoursPeriod.OpeningHoursPoint", + ) + ) + + class SpecialDay(proto.Message): + r"""Structured information for special days that fall within the + period that the returned opening hours cover. Special days are + days that could impact the business hours of a place, e.g. + Christmas day. + + Attributes: + date (google.type.date_pb2.Date): + The date of this special day. + """ + + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + + open_now: bool = proto.Field( + proto.BOOL, + number=1, + ) + periods: MutableSequence[ + "Place.OpeningHours.OpeningHoursPeriod" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Place.OpeningHours.OpeningHoursPeriod", + ) + weekday_descriptions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + secondary_hour_type: "Place.OpeningHours.SecondaryHourType" = proto.Field( + proto.ENUM, + number=4, + enum="Place.OpeningHours.SecondaryHourType", + ) + special_days: MutableSequence[ + "Place.OpeningHours.SpecialDay" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Place.OpeningHours.SpecialDay", + ) + + class Attribution(proto.Message): + r"""Information about data providers of this place. + + Attributes: + provider (str): + Name of the Place's data provider. + provider_uri (str): + URI to the Place's data provider. + """ + + provider: str = proto.Field( + proto.STRING, + number=1, + ) + provider_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + class EditorialSummary(proto.Message): + r"""Contains a summary of the place. + + Attributes: + overview (google.type.localized_text_pb2.LocalizedText): + A summary is comprised of a textual overview, + and also includes the language code for these if + applicable. Summary text must be presented as-is + and can not be modified or altered. 
+ """ + + overview: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=1, + message=localized_text_pb2.LocalizedText, + ) + + id: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=31, + message=localized_text_pb2.LocalizedText, + ) + types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + national_phone_number: str = proto.Field( + proto.STRING, + number=7, + ) + international_phone_number: str = proto.Field( + proto.STRING, + number=8, + ) + formatted_address: str = proto.Field( + proto.STRING, + number=9, + ) + address_components: MutableSequence[AddressComponent] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=AddressComponent, + ) + plus_code: PlusCode = proto.Field( + proto.MESSAGE, + number=11, + message=PlusCode, + ) + location: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=12, + message=latlng_pb2.LatLng, + ) + viewport: ggt_viewport.Viewport = proto.Field( + proto.MESSAGE, + number=13, + message=ggt_viewport.Viewport, + ) + rating: float = proto.Field( + proto.DOUBLE, + number=14, + ) + google_maps_uri: str = proto.Field( + proto.STRING, + number=15, + ) + website_uri: str = proto.Field( + proto.STRING, + number=16, + ) + reviews: MutableSequence[Review] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=Review, + ) + opening_hours: OpeningHours = proto.Field( + proto.MESSAGE, + number=21, + message=OpeningHours, + ) + utc_offset_minutes: int = proto.Field( + proto.INT32, + number=22, + ) + adr_format_address: str = proto.Field( + proto.STRING, + number=24, + ) + business_status: BusinessStatus = proto.Field( + proto.ENUM, + number=25, + enum=BusinessStatus, + ) + price_level: "PriceLevel" = proto.Field( + proto.ENUM, + number=26, + enum="PriceLevel", + ) + attributions: MutableSequence[Attribution] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message=Attribution, + ) + user_rating_count: int = proto.Field( + proto.INT32, + number=28, + ) + icon_mask_base_uri: str = proto.Field( + proto.STRING, + number=29, + ) + icon_background_color: str = proto.Field( + proto.STRING, + number=30, + ) + takeout: bool = proto.Field( + proto.BOOL, + number=33, + optional=True, + ) + delivery: bool = proto.Field( + proto.BOOL, + number=34, + optional=True, + ) + dine_in: bool = proto.Field( + proto.BOOL, + number=35, + optional=True, + ) + curbside_pickup: bool = proto.Field( + proto.BOOL, + number=36, + optional=True, + ) + wheelchair_accessible_entrance: bool = proto.Field( + proto.BOOL, + number=37, + optional=True, + ) + reservable: bool = proto.Field( + proto.BOOL, + number=38, + optional=True, + ) + serves_breakfast: bool = proto.Field( + proto.BOOL, + number=39, + optional=True, + ) + serves_lunch: bool = proto.Field( + proto.BOOL, + number=40, + optional=True, + ) + serves_dinner: bool = proto.Field( + proto.BOOL, + number=41, + optional=True, + ) + serves_beer: bool = proto.Field( + proto.BOOL, + number=42, + optional=True, + ) + serves_wine: bool = proto.Field( + proto.BOOL, + number=43, + optional=True, + ) + serves_brunch: bool = proto.Field( + proto.BOOL, + number=44, + optional=True, + ) + serves_vegetarian_food: bool = proto.Field( + proto.BOOL, + number=45, + optional=True, + ) + current_opening_hours: OpeningHours = proto.Field( + proto.MESSAGE, + number=46, + message=OpeningHours, + ) + current_secondary_opening_hours: MutableSequence[ + OpeningHours + ] = proto.RepeatedField( + 
proto.MESSAGE, + number=47, + message=OpeningHours, + ) + secondary_opening_hours: MutableSequence[OpeningHours] = proto.RepeatedField( + proto.MESSAGE, + number=49, + message=OpeningHours, + ) + editorial_summary: EditorialSummary = proto.Field( + proto.MESSAGE, + number=48, + message=EditorialSummary, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/google/maps/places_v1/types/places_service.py b/packages/google-maps-places/google/maps/places_v1/types/places_service.py new file mode 100644 index 000000000000..5d479fae9b3e --- /dev/null +++ b/packages/google-maps-places/google/maps/places_v1/types/places_service.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.geo.type.types import viewport +import proto # type: ignore + +from google.maps.places_v1.types import geometry, place + +__protobuf__ = proto.module( + package="google.maps.places.v1", + manifest={ + "Int32Range", + "SearchTextRequest", + "SearchTextResponse", + }, +) + + +class Int32Range(proto.Message): + r"""int 32 range. Both min and max are optional. If only min is + set, then the range only has a lower bound. If only max is set, + then range only has an upper bound. At least one of min and max + must be set. Values are inclusive. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_ (int): + Lower bound. If unset, behavior is documented + on the range field. + + This field is a member of `oneof`_ ``_min``. + max_ (int): + Upper bound. If unset, behavior is documented + on the range field. + + This field is a member of `oneof`_ ``_max``. + """ + + min_: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + max_: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class SearchTextRequest(proto.Message): + r"""Request data structure for SearchText. + + Attributes: + text_query (str): + Required. The text query for textual search. + language_code (str): + Place details will be displayed with the + preferred language if available. If the language + code is unspecified or unrecognized, place + details of any language may be returned, with a + preference for English if such details exist. + + Current list of supported languages: + https://developers.google.com/maps/faq#languagesupport. + region_code (str): + The Unicode country/region code (CLDR) of the location where + the request is coming from. It is used to display the place + details, like region-specific place name, if available. + + For more information, see + http://www.unicode.org/reports/tr35/#unicode_region_subtag. + + Note that 3-digit region codes are not currently supported. + rank_preference (google.maps.places_v1.types.SearchTextRequest.RankPreference): + How results will be ranked in the response. 
+        location (google.maps.places_v1.types.SearchTextRequest.Location):
+            The region to search. Setting the location usually yields
+            better results, and setting it is recommended. This location
+            serves as a bias unless strict_restriction is set to true,
+            which turns the location into a strict restriction.
+
+            Deprecated. Use LocationRestriction or LocationBias instead.
+        included_type (str):
+            The requested place type. Full list of types supported:
+            https://developers.google.com/places/supported_types. Only
+            one included type is supported.
+        open_now (bool):
+            Used to restrict the search to places that are open at a
+            specific time. open_now marks if a business is currently
+            open.
+        price_range (google.maps.places_v1.types.Int32Range):
+            [Deprecated!] Used to restrict the search to places that are
+            within a certain price range. This is on a scale of 0 to 4.
+            Setting a minimum of 0 or a maximum of 4 has no effect on
+            the search results. Min price defaults to 0 and max price
+            defaults to 4. The default value is used if either min or
+            max is unset.
+        min_rating (float):
+            Filter out results whose average user rating is strictly
+            less than this limit. A valid value must be a float between
+            0 and 5 (inclusive) at a 0.5 cadence, i.e.
+            ``[0, 0.5, 1.0, ... , 5.0]``. This is to keep
+            parity with LocalRefinement_UserRating. The input rating
+            is rounded up to the nearest 0.5 (ceiling). For instance, a
+            rating of 0.6 will eliminate all results with a rating
+            less than 1.0.
+        max_result_count (int):
+            Maximum number of results to return. It must be between 1
+            and 20, inclusive. If the number is unset, it falls back
+            to the upper limit. If the number is set to a negative value
+            or exceeds the upper limit, an INVALID_ARGUMENT error is
+            returned.
+        price_levels (MutableSequence[google.maps.places_v1.types.PriceLevel]):
+            Used to restrict the search to places that
+            are marked as certain price levels. Users can
+            choose any combination of price levels. Defaults
+            to selecting all price levels.
+        strict_type_filtering (bool):
+            Used to set strict type filtering for included_type. If set
+            to true, only results of the same type will be returned.
+            Defaults to false.
+        location_bias (google.maps.places_v1.types.SearchTextRequest.LocationBias):
+            The region to search. This location serves as a bias, which
+            means results around the given location might be returned.
+            Cannot be set along with location_restriction.
+        location_restriction (google.maps.places_v1.types.SearchTextRequest.LocationRestriction):
+            The region to search. This location serves as a restriction,
+            which means results outside the given location will not be
+            returned. Cannot be set along with location_bias.
+    """
+
+    class RankPreference(proto.Enum):
+        r"""How results will be ranked in the response.
+
+        Values:
+            RANK_PREFERENCE_UNSPECIFIED (0):
+                RankPreference value not set. Will default to
+                DISTANCE.
+            DISTANCE (1):
+                Ranks results by distance.
+            RELEVANCE (2):
+                Ranks results by relevance. Sort order
+                determined by normal ranking stack. See
+                SortRefinement::RELEVANCE.
+        """
+        RANK_PREFERENCE_UNSPECIFIED = 0
+        DISTANCE = 1
+        RELEVANCE = 2
+
+    class Location(proto.Message):
+        r"""The region to search.
+        Deprecated. Use LocationRestriction or LocationBias instead.
+
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            rectangle (google.geo.type.types.Viewport):
+                A rectangle box defined by northeast and
+                southwest corner.
+ + This field is a member of `oneof`_ ``type``. + strict_restriction (bool): + Make location field a strict restriction and + filter out POIs outside of the given location. + If location type field is unset this field will + have no effect. + """ + + rectangle: viewport.Viewport = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=viewport.Viewport, + ) + strict_restriction: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class LocationBias(proto.Message): + r"""The region to search. This location serves as a bias which + means results around given location might be returned. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rectangle (google.geo.type.types.Viewport): + A rectangle box defined by northeast and + southwest corner. + + This field is a member of `oneof`_ ``type``. + circle (google.maps.places_v1.types.Circle): + A circle defined by center point and radius. + + This field is a member of `oneof`_ ``type``. + """ + + rectangle: viewport.Viewport = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=viewport.Viewport, + ) + circle: geometry.Circle = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message=geometry.Circle, + ) + + class LocationRestriction(proto.Message): + r"""The region to search. This location serves as a restriction + which means results outside given location will not be returned. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rectangle (google.geo.type.types.Viewport): + A rectangle box defined by northeast and + southwest corner. + + This field is a member of `oneof`_ ``type``. + """ + + rectangle: viewport.Viewport = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=viewport.Viewport, + ) + + text_query: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + region_code: str = proto.Field( + proto.STRING, + number=3, + ) + rank_preference: RankPreference = proto.Field( + proto.ENUM, + number=4, + enum=RankPreference, + ) + location: Location = proto.Field( + proto.MESSAGE, + number=5, + message=Location, + ) + included_type: str = proto.Field( + proto.STRING, + number=6, + ) + open_now: bool = proto.Field( + proto.BOOL, + number=7, + ) + price_range: "Int32Range" = proto.Field( + proto.MESSAGE, + number=8, + message="Int32Range", + ) + min_rating: float = proto.Field( + proto.DOUBLE, + number=9, + ) + max_result_count: int = proto.Field( + proto.INT32, + number=10, + ) + price_levels: MutableSequence[place.PriceLevel] = proto.RepeatedField( + proto.ENUM, + number=11, + enum=place.PriceLevel, + ) + strict_type_filtering: bool = proto.Field( + proto.BOOL, + number=12, + ) + location_bias: LocationBias = proto.Field( + proto.MESSAGE, + number=13, + message=LocationBias, + ) + location_restriction: LocationRestriction = proto.Field( + proto.MESSAGE, + number=14, + message=LocationRestriction, + ) + + +class SearchTextResponse(proto.Message): + r"""Response proto for SearchText. + + Attributes: + places (MutableSequence[google.maps.places_v1.types.Place]): + A list of places that meet the user's text + search criteria. 
+ """ + + places: MutableSequence[place.Place] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=place.Place, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-places/mypy.ini b/packages/google-maps-places/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-maps-places/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-maps-places/noxfile.py b/packages/google-maps-places/noxfile.py new file mode 100644 index 000000000000..1749edb5dad7 --- /dev/null +++ b/packages/google-maps-places/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
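+    # Note that only the exact string "false" triggers the skip below; an
+    # unset variable or any other value (e.g. "0") behaves like "true" and
+    # lets the session continue.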
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
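+    # The regex applied to this file below captures the package name from each
+    # pinned line via a lookahead on "==": e.g. (illustrative constraint)
+    # "proto-plus==1.22.0" yields "proto-plus".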
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-maps-places/renovate.json b/packages/google-maps-places/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-maps-places/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-maps-places/samples/generated_samples/places_v1_generated_places_search_text_async.py b/packages/google-maps-places/samples/generated_samples/places_v1_generated_places_search_text_async.py new file mode 100644 index 000000000000..26497017e58b --- /dev/null +++ b/packages/google-maps-places/samples/generated_samples/places_v1_generated_places_search_text_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchText +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-places + + +# [START places_v1_generated_Places_SearchText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.maps import places_v1 + + +async def sample_search_text(): + # Create a client + client = places_v1.PlacesAsyncClient() + + # Initialize request argument(s) + request = places_v1.SearchTextRequest( + text_query="text_query_value", + ) + + # Make the request + response = await client.search_text(request=request) + + # Handle the response + print(response) + +# [END places_v1_generated_Places_SearchText_async] diff --git a/packages/google-maps-places/samples/generated_samples/places_v1_generated_places_search_text_sync.py b/packages/google-maps-places/samples/generated_samples/places_v1_generated_places_search_text_sync.py new file mode 100644 index 000000000000..260fdc3cd42a --- /dev/null +++ b/packages/google-maps-places/samples/generated_samples/places_v1_generated_places_search_text_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-maps-places + + +# [START places_v1_generated_Places_SearchText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.maps import places_v1
+
+
+def sample_search_text():
+    # Create a client
+    client = places_v1.PlacesClient()
+
+    # Initialize request argument(s)
+    request = places_v1.SearchTextRequest(
+        text_query="text_query_value",
+    )
+
+    # Make the request
+    response = client.search_text(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END places_v1_generated_Places_SearchText_sync]
diff --git a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json
new file mode 100644
index 000000000000..91d60d34ab58
--- /dev/null
+++ b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json
@@ -0,0 +1,168 @@
+{
+    "clientLibrary": {
+        "apis": [
+            {
+                "id": "google.maps.places.v1",
+                "version": "v1"
+            }
+        ],
+        "language": "PYTHON",
+        "name": "google-maps-places",
+        "version": "0.1.1"
+    },
+    "snippets": [
+        {
+            "canonical": true,
+            "clientMethod": {
+                "async": true,
+                "client": {
+                    "fullName": "google.maps.places_v1.PlacesAsyncClient",
+                    "shortName": "PlacesAsyncClient"
+                },
+                "fullName": "google.maps.places_v1.PlacesAsyncClient.search_text",
+                "method": {
+                    "fullName": "google.maps.places.v1.Places.SearchText",
+                    "service": {
+                        "fullName": "google.maps.places.v1.Places",
+                        "shortName": "Places"
+                    },
+                    "shortName": "SearchText"
+                },
+                "parameters": [
+                    {
+                        "name": "request",
+                        "type": "google.maps.places_v1.types.SearchTextRequest"
+                    },
+                    {
+                        "name": "retry",
+                        "type": "google.api_core.retry.Retry"
+                    },
+                    {
+                        "name": "timeout",
+                        "type": "float"
+                    },
+                    {
+                        "name": "metadata",
+                        "type": "Sequence[Tuple[str, str]]"
+                    }
+                ],
+                "resultType": "google.maps.places_v1.types.SearchTextResponse",
+                "shortName": "search_text"
+            },
+            "description": "Sample for SearchText",
+            "file": "places_v1_generated_places_search_text_async.py",
+            "language": "PYTHON",
+            "origin": "API_DEFINITION",
+            "regionTag": "places_v1_generated_Places_SearchText_async",
+            "segments": [
+                {
+                    "end": 51,
+                    "start": 27,
+                    "type": "FULL"
+                },
+                {
+                    "end": 51,
+                    "start": 27,
+                    "type": "SHORT"
+                },
+                {
+                    "end": 40,
+                    "start": 38,
+                    "type": "CLIENT_INITIALIZATION"
+                },
+                {
+                    "end": 45,
+                    "start": 41,
+                    "type": "REQUEST_INITIALIZATION"
+                },
+                {
+                    "end": 48,
+                    "start": 46,
+                    "type": "REQUEST_EXECUTION"
+                },
+                {
+                    "end": 52,
+                    "start": 49,
+                    "type": "RESPONSE_HANDLING"
+                }
+            ],
+            "title": "places_v1_generated_places_search_text_async.py"
+        },
+        {
+            "canonical": true,
+            "clientMethod": {
+                "client": {
+                    "fullName": "google.maps.places_v1.PlacesClient",
+                    "shortName": "PlacesClient"
+                },
+                "fullName": "google.maps.places_v1.PlacesClient.search_text",
+                "method": {
+                    "fullName": "google.maps.places.v1.Places.SearchText",
+                    "service": {
+                        "fullName": "google.maps.places.v1.Places",
+                        "shortName": "Places"
+                    },
+                    "shortName": "SearchText"
+                },
+                "parameters": [
+                    {
+                        "name": "request",
+                        "type": "google.maps.places_v1.types.SearchTextRequest"
+                    },
+                    {
+                        "name": "retry",
+                        "type": "google.api_core.retry.Retry"
+                    },
+                    {
+                        "name": "timeout",
+                        "type": "float"
+                    },
+                    {
+                        "name": "metadata",
+                        "type": "Sequence[Tuple[str, str]]"
+                    }
+                ],
+                "resultType": "google.maps.places_v1.types.SearchTextResponse",
+                "shortName": "search_text"
+            },
+            "description": "Sample for SearchText",
+            "file":
"places_v1_generated_places_search_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "places_v1_generated_Places_SearchText_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "places_v1_generated_places_search_text_sync.py" + } + ] +} diff --git a/packages/google-maps-places/scripts/decrypt-secrets.sh b/packages/google-maps-places/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/google-maps-places/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-maps-places/scripts/fixup_places_v1_keywords.py b/packages/google-maps-places/scripts/fixup_places_v1_keywords.py new file mode 100644 index 000000000000..76a664670b4a --- /dev/null +++ b/packages/google-maps-places/scripts/fixup_places_v1_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class placesCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'search_text': ('text_query', 'language_code', 'region_code', 'rank_preference', 'location', 'included_type', 'open_now', 'price_range', 'min_rating', 'max_result_count', 'price_levels', 'strict_type_filtering', 'location_bias', 'location_restriction', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=placesCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the places client library. 
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-maps-places/setup.py b/packages/google-maps-places/setup.py new file mode 100644 index 000000000000..3831fc557a03 --- /dev/null +++ b/packages/google-maps-places/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
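A note on the fixup script above: it is a migration aid for call sites written
against older, flattened signatures. Each recognized method call is rewritten
so the API fields travel in a single request mapping, while the control-plane
arguments (retry, timeout, metadata) stay as ordinary keyword arguments; it is
run as python3 scripts/fixup_places_v1_keywords.py -d <input_dir> -o <output_dir>
and copies the input tree rather than editing it in place. A minimal sketch of
the target shape, assuming google-maps-places is installed; the field order
follows METHOD_TO_PARAMS above, the query values are invented, and no API call
is made:

from google.maps import places_v1

# Old style, the shape the fixup tool looks for:
#     client.search_text("pizza in Sydney", "en")
# New style, what the call is rewritten into:
#     client.search_text(request={"text_query": "pizza in Sydney",
#                                 "language_code": "en"})
# That dict corresponds field-for-field to this request message:
request = places_v1.SearchTextRequest(
    text_query="pizza in Sydney",
    language_code="en",
)
print(request)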
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-maps-places" + + +description = "Google Maps Places API client library" + +version = {} +with open(os.path.join(package_root, "google/maps/places/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-geo-type >= 0.1.0, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.maps"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-maps-places/testing/.gitignore b/packages/google-maps-places/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-maps-places/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-maps-places/testing/constraints-3.10.txt b/packages/google-maps-places/testing/constraints-3.10.txt new file mode 100644 index 000000000000..2214a366a259 --- /dev/null +++ b/packages/google-maps-places/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-geo-type diff --git a/packages/google-maps-places/testing/constraints-3.11.txt b/packages/google-maps-places/testing/constraints-3.11.txt new file mode 100644 index 000000000000..2214a366a259 --- /dev/null +++ b/packages/google-maps-places/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +google-geo-type diff --git a/packages/google-maps-places/testing/constraints-3.12.txt b/packages/google-maps-places/testing/constraints-3.12.txt new file mode 100644 index 000000000000..2214a366a259 --- /dev/null +++ b/packages/google-maps-places/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-geo-type diff --git a/packages/google-maps-places/testing/constraints-3.7.txt b/packages/google-maps-places/testing/constraints-3.7.txt new file mode 100644 index 000000000000..33ac865954a6 --- /dev/null +++ b/packages/google-maps-places/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +google-geo-type==0.1.0 diff --git a/packages/google-maps-places/testing/constraints-3.8.txt b/packages/google-maps-places/testing/constraints-3.8.txt new file mode 100644 index 000000000000..2214a366a259 --- /dev/null +++ b/packages/google-maps-places/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-geo-type diff --git a/packages/google-maps-places/testing/constraints-3.9.txt b/packages/google-maps-places/testing/constraints-3.9.txt new file mode 100644 index 000000000000..2214a366a259 --- /dev/null +++ b/packages/google-maps-places/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-geo-type diff --git a/packages/google-maps-places/tests/__init__.py b/packages/google-maps-places/tests/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-places/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-places/tests/unit/__init__.py b/packages/google-maps-places/tests/unit/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-places/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-places/tests/unit/gapic/__init__.py b/packages/google-maps-places/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-places/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-places/tests/unit/gapic/places_v1/__init__.py b/packages/google-maps-places/tests/unit/gapic/places_v1/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-maps-places/tests/unit/gapic/places_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py new file mode 100644 index 000000000000..3a577fd456aa --- /dev/null +++ b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py @@ -0,0 +1,1672 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
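Two details in the packaging files above are worth calling out.
testing/constraints-3.7.txt pins each dependency at exactly the lower bound
that setup.py declares (google-api-core==1.34.0 against ">= 1.34.0", and so
on), so CI installs and tests the oldest supported combination, while the
constraints files for newer Python versions are left unpinned. And the trove
classifier in setup.py is derived from the first character of the version
string. A minimal sketch of that gating; the helper name is hypothetical,
since setup.py does the check inline:

# Any 0.x release is published as Beta; everything else as Stable.
def release_status_for(version: str) -> str:
    if version[0] == "0":
        return "Development Status :: 4 - Beta"
    return "Development Status :: 5 - Production/Stable"

assert release_status_for("0.1.1") == "Development Status :: 4 - Beta"
assert release_status_for("1.5.1") == "Development Status :: 5 - Production/Stable"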
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.geo.type.types import viewport +from google.oauth2 import service_account +from google.protobuf import json_format +from google.type import latlng_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.maps.places_v1.services.places import ( + PlacesAsyncClient, + PlacesClient, + transports, +) +from google.maps.places_v1.types import geometry, place, places_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PlacesClient._get_default_mtls_endpoint(None) is None + assert PlacesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + PlacesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + PlacesClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PlacesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert PlacesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PlacesClient, "grpc"), + (PlacesAsyncClient, "grpc_asyncio"), + (PlacesClient, "rest"), + ], +) +def test_places_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "places.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://places.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PlacesGrpcTransport, "grpc"), + (transports.PlacesGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PlacesRestTransport, "rest"), + ], +) +def 
test_places_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PlacesClient, "grpc"), + (PlacesAsyncClient, "grpc_asyncio"), + (PlacesClient, "rest"), + ], +) +def test_places_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "places.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://places.googleapis.com" + ) + + +def test_places_client_get_transport_class(): + transport = PlacesClient.get_transport_class() + available_transports = [ + transports.PlacesGrpcTransport, + transports.PlacesRestTransport, + ] + assert transport in available_transports + + transport = PlacesClient.get_transport_class("grpc") + assert transport == transports.PlacesGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PlacesClient, transports.PlacesGrpcTransport, "grpc"), + (PlacesAsyncClient, transports.PlacesGrpcAsyncIOTransport, "grpc_asyncio"), + (PlacesClient, transports.PlacesRestTransport, "rest"), + ], +) +@mock.patch.object( + PlacesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PlacesClient) +) +@mock.patch.object( + PlacesAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PlacesAsyncClient) +) +def test_places_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PlacesClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PlacesClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
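+    # An explicit api_endpoint in ClientOptions should override the default
+    # host, so the transport below must be constructed with "squid.clam.whelk".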
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (PlacesClient, transports.PlacesGrpcTransport, "grpc", "true"), + ( + PlacesAsyncClient, + transports.PlacesGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (PlacesClient, transports.PlacesGrpcTransport, "grpc", "false"), + ( + PlacesAsyncClient, + transports.PlacesGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (PlacesClient, transports.PlacesRestTransport, "rest", "true"), + (PlacesClient, transports.PlacesRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + PlacesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PlacesClient) +) +@mock.patch.object( + PlacesAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PlacesAsyncClient) +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_places_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
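+    # With "false" the client should keep the default endpoint and send no
+    # client certificate; with "true" it should switch to the mTLS endpoint
+    # and use the provided callback.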
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [PlacesClient, PlacesAsyncClient]) +@mock.patch.object( + PlacesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PlacesClient) +) +@mock.patch.object( + PlacesAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PlacesAsyncClient) +) +def test_places_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PlacesClient, transports.PlacesGrpcTransport, "grpc"), + (PlacesAsyncClient, transports.PlacesGrpcAsyncIOTransport, "grpc_asyncio"), + (PlacesClient, transports.PlacesRestTransport, "rest"), + ], +) +def test_places_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (PlacesClient, transports.PlacesGrpcTransport, "grpc", grpc_helpers), + ( + PlacesAsyncClient, + transports.PlacesGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (PlacesClient, transports.PlacesRestTransport, "rest", None), + ], +) +def test_places_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_places_client_client_options_from_dict(): + with mock.patch( + "google.maps.places_v1.services.places.transports.PlacesGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PlacesClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (PlacesClient, transports.PlacesGrpcTransport, "grpc", grpc_helpers), + ( + PlacesAsyncClient, + transports.PlacesGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_places_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "places.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="places.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + places_service.SearchTextRequest, + dict, + ], +) +def test_search_text(request_type, transport: str = "grpc"): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = places_service.SearchTextResponse() + response = client.search_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == places_service.SearchTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, places_service.SearchTextResponse) + + +def test_search_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_text), "__call__") as call: + client.search_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == places_service.SearchTextRequest() + + +@pytest.mark.asyncio +async def test_search_text_async( + transport: str = "grpc_asyncio", request_type=places_service.SearchTextRequest +): + client = PlacesAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + places_service.SearchTextResponse() + ) + response = await client.search_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == places_service.SearchTextRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, places_service.SearchTextResponse) + + +@pytest.mark.asyncio +async def test_search_text_async_from_dict(): + await test_search_text_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + places_service.SearchTextRequest, + dict, + ], +) +def test_search_text_rest(request_type): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = places_service.SearchTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = places_service.SearchTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, places_service.SearchTextResponse) + + +def test_search_text_rest_required_fields( + request_type=places_service.SearchTextRequest, +): + transport_class = transports.PlacesRestTransport + + request_init = {} + request_init["text_query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["textQuery"] = "text_query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "textQuery" in jsonified_request + assert jsonified_request["textQuery"] == "text_query_value" + + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = places_service.SearchTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
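+            # The canned transcode result below stands in for the real HTTP
+            # rule, so the request can be executed without a concrete URI
+            # mapping for SearchText.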
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = places_service.SearchTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_text_rest_unset_required_fields(): + transport = transports.PlacesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("textQuery",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_text_rest_interceptors(null_interceptor): + transport = transports.PlacesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PlacesRestInterceptor(), + ) + client = PlacesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PlacesRestInterceptor, "post_search_text" + ) as post, mock.patch.object( + transports.PlacesRestInterceptor, "pre_search_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = places_service.SearchTextRequest.pb( + places_service.SearchTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = places_service.SearchTextResponse.to_json( + places_service.SearchTextResponse() + ) + + request = places_service.SearchTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = places_service.SearchTextResponse() + + client.search_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_text_rest_bad_request( + transport: str = "rest", request_type=places_service.SearchTextRequest +): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_text(request) + + +def test_search_text_rest_error(): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
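+    # A fully constructed transport already carries its own credentials, so
+    # supplying a second credential source alongside it is ambiguous.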
+ transport = transports.PlacesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PlacesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PlacesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PlacesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PlacesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PlacesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PlacesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PlacesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PlacesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PlacesClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PlacesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PlacesGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PlacesGrpcTransport, + transports.PlacesGrpcAsyncIOTransport, + transports.PlacesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = PlacesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
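+    # No transport argument is given here, so the client should fall back to
+    # PlacesGrpcTransport.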
+ client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PlacesGrpcTransport, + ) + + +def test_places_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PlacesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_places_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.maps.places_v1.services.places.transports.PlacesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PlacesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("search_text",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_places_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.maps.places_v1.services.places.transports.PlacesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PlacesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_places_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.maps.places_v1.services.places.transports.PlacesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PlacesTransport() + adc.assert_called_once() + + +def test_places_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PlacesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PlacesGrpcTransport, + transports.PlacesGrpcAsyncIOTransport, + ], +) +def test_places_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
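+    # google.auth.default() is patched to supply anonymous credentials; the
+    # assertion below checks that it was called with the requested scopes and
+    # quota project.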
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PlacesGrpcTransport, + transports.PlacesGrpcAsyncIOTransport, + transports.PlacesRestTransport, + ], +) +def test_places_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PlacesGrpcTransport, grpc_helpers), + (transports.PlacesGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_places_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "places.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="places.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.PlacesGrpcTransport, transports.PlacesGrpcAsyncIOTransport], +) +def test_places_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
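+    # In that case grpc.ssl_channel_credentials should be built from the
+    # cert/key pair produced by the client_cert_source callback.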
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_places_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PlacesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_places_host_no_port(transport_name): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="places.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "places.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://places.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_places_host_with_port(transport_name): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="places.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "places.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://places.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_places_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PlacesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PlacesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.search_text._session + session2 = client2.transport.search_text._session + assert session1 != session2 + + +def test_places_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PlacesGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_places_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PlacesGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None
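The two ``test_places_host_*`` cases above pin down how a bare ``api_endpoint`` is normalized per transport: gRPC targets gain an explicit ``:443`` port when none is given, while the REST transport gains a URL scheme instead. A minimal sketch of that rule (``normalize_host`` is a hypothetical helper written only to mirror what the tests assert; it is not part of the generated library):

```python
def normalize_host(api_endpoint: str, transport_name: str) -> str:
    """Mirror the endpoint normalization asserted by the host tests above."""
    if transport_name in ("grpc", "grpc_asyncio"):
        # gRPC targets need an explicit port; default to 443 when none is given.
        return api_endpoint if ":" in api_endpoint else f"{api_endpoint}:443"
    # The REST transport keeps any port but prefixes a URL scheme.
    return f"https://{api_endpoint}"


assert normalize_host("places.googleapis.com", "grpc") == "places.googleapis.com:443"
assert normalize_host("places.googleapis.com:8000", "rest") == "https://places.googleapis.com:8000"
```

+ + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor.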
+@pytest.mark.parametrize( + "transport_class", + [transports.PlacesGrpcTransport, transports.PlacesGrpcAsyncIOTransport], +) +def test_places_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.PlacesGrpcTransport, transports.PlacesGrpcAsyncIOTransport], +) +def test_places_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PlacesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = PlacesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PlacesClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PlacesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = PlacesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PlacesClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PlacesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = PlacesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PlacesClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = PlacesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = PlacesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PlacesClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PlacesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = PlacesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PlacesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PlacesTransport, "_prep_wrapped_messages" + ) as prep: + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PlacesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PlacesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PlacesAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PlacesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PlacesClient, transports.PlacesGrpcTransport), + (PlacesAsyncClient, transports.PlacesGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-maps-routing/.coveragerc b/packages/google-maps-routing/.coveragerc index 4557ec924a06..d0ce5597b0a9 100644 --- a/packages/google-maps-routing/.coveragerc +++ b/packages/google-maps-routing/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/maps/routing/__init__.py + google/maps/routing/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-maps-routing/.repo-metadata.json b/packages/google-maps-routing/.repo-metadata.json index 76ba5c9b9300..3922055a2990 100644 --- a/packages/google-maps-routing/.repo-metadata.json +++ 
b/packages/google-maps-routing/.repo-metadata.json @@ -3,7 +3,7 @@ "name_pretty": "Google Maps Routing", "api_description": "Help your users find the ideal way to get from A to Z with comprehensive data and real-time traffic.", "product_documentation": "https://mapsplatform.google.com/maps-products/#routes-section", - "client_documentation": "https://cloud.google.com/python/docs/reference/routing/latest", + "client_documentation": "https://googleapis.dev/python/routing/latest", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", "release_level": "preview", "language": "python", diff --git a/packages/google-maps-routing/CHANGELOG.md b/packages/google-maps-routing/CHANGELOG.md index a6203cf52343..08d6c49e48b0 100644 --- a/packages/google-maps-routing/CHANGELOG.md +++ b/packages/google-maps-routing/CHANGELOG.md @@ -1,5 +1,35 @@ # Changelog +## [0.5.1](https://github.com/googleapis/google-cloud-python/compare/google-maps-routing-v0.5.0...google-maps-routing-v0.5.1) (2023-06-03) + + +### Documentation + +* fix broken client library documentation links ([#11192](https://github.com/googleapis/google-cloud-python/issues/11192)) ([5e17f7a](https://github.com/googleapis/google-cloud-python/commit/5e17f7a901bbbae8ff9a44ed62f1abd2386da2c8)) + +## [0.5.0](https://github.com/googleapis/google-cloud-python/compare/google-maps-routing-v0.4.0...google-maps-routing-v0.5.0) (2023-03-25) + + +### Features + +* Add support for specifying region_code and language_code in the ComputeRouteMatrixRequest ([80f77f1](https://github.com/googleapis/google-cloud-python/commit/80f77f17494d4d98e9f8ad7e0d4a693e4bd12ede)) +* Add support for specifying region_code in the ComputeRoutesRequest ([80f77f1](https://github.com/googleapis/google-cloud-python/commit/80f77f17494d4d98e9f8ad7e0d4a693e4bd12ede)) +* Add support for specifying waypoints as addresses ([80f77f1](https://github.com/googleapis/google-cloud-python/commit/80f77f17494d4d98e9f8ad7e0d4a693e4bd12ede)) + + +### Documentation + +* Clarify usage of compute_alternative_routes in proto comment ([80f77f1](https://github.com/googleapis/google-cloud-python/commit/80f77f17494d4d98e9f8ad7e0d4a693e4bd12ede)) +* Clarify usage of RouteLegStepTravelAdvisory in comment ([80f77f1](https://github.com/googleapis/google-cloud-python/commit/80f77f17494d4d98e9f8ad7e0d4a693e4bd12ede)) +* Update proto comments to contain concrete references to other proto messages ([80f77f1](https://github.com/googleapis/google-cloud-python/commit/80f77f17494d4d98e9f8ad7e0d4a693e4bd12ede)) + +## [0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-maps-routing-v0.3.1...google-maps-routing-v0.4.0) (2023-02-09) + + +### Features + +* enable "rest" transport in Python for services supporting numeric enums ([#10839](https://github.com/googleapis/google-cloud-python/issues/10839)) ([ad59d56](https://github.com/googleapis/google-cloud-python/commit/ad59d569bda339ed31500602e2db369afdbfcf0b)) + ## [0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-maps-routing-v0.3.0...google-maps-routing-v0.3.1) (2023-01-20) diff --git a/packages/google-maps-routing/README.rst b/packages/google-maps-routing/README.rst index a109dfad7f24..af806067396e 100644 --- a/packages/google-maps-routing/README.rst +++ b/packages/google-maps-routing/README.rst @@ -15,7 +15,7 @@ Python Client for Google Maps Routing .. |versions| image:: https://img.shields.io/pypi/pyversions/google-maps-routing.svg :target: https://pypi.org/project/google-maps-routing/ .. 
_Google Maps Routing: https://mapsplatform.google.com/maps-products/#routes-section -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/routing/latest +.. _Client Library Documentation: https://googleapis.dev/python/routing/latest .. _Product Documentation: https://mapsplatform.google.com/maps-products/#routes-section Quick Start diff --git a/packages/google-maps-routing/google/maps/routing/__init__.py b/packages/google-maps-routing/google/maps/routing/__init__.py index 3525247e0abd..ef47c5e1bb55 100644 --- a/packages/google-maps-routing/google/maps/routing/__init__.py +++ b/packages/google-maps-routing/google/maps/routing/__init__.py @@ -25,6 +25,10 @@ FallbackReason, FallbackRoutingMode, ) +from google.maps.routing_v2.types.geocoding_results import ( + GeocodedWaypoint, + GeocodingResults, +) from google.maps.routing_v2.types.location import Location from google.maps.routing_v2.types.maneuver import Maneuver from google.maps.routing_v2.types.navigation_instruction import NavigationInstruction @@ -68,6 +72,8 @@ "FallbackInfo", "FallbackReason", "FallbackRoutingMode", + "GeocodedWaypoint", + "GeocodingResults", "Location", "Maneuver", "NavigationInstruction", diff --git a/packages/google-maps-routing/google/maps/routing/gapic_version.py b/packages/google-maps-routing/google/maps/routing/gapic_version.py index 056efd287f1d..d27a8e0e057a 100644 --- a/packages/google-maps-routing/google/maps/routing/gapic_version.py +++ b/packages/google-maps-routing/google/maps/routing/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.3.1" # {x-release-please-version} +__version__ = "0.5.1" # {x-release-please-version} diff --git a/packages/google-maps-routing/google/maps/routing_v2/__init__.py b/packages/google-maps-routing/google/maps/routing_v2/__init__.py index 694a27df3918..4c3204fd4875 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/__init__.py +++ b/packages/google-maps-routing/google/maps/routing_v2/__init__.py @@ -13,13 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.maps.routing import gapic_version as package_version +from google.maps.routing_v2 import gapic_version as package_version __version__ = package_version.__version__ from .services.routes import RoutesAsyncClient, RoutesClient from .types.fallback_info import FallbackInfo, FallbackReason, FallbackRoutingMode +from .types.geocoding_results import GeocodedWaypoint, GeocodingResults from .types.location import Location from .types.maneuver import Maneuver from .types.navigation_instruction import NavigationInstruction @@ -61,6 +62,8 @@ "FallbackInfo", "FallbackReason", "FallbackRoutingMode", + "GeocodedWaypoint", + "GeocodingResults", "Location", "Maneuver", "NavigationInstruction", diff --git a/packages/google-maps-routing/google/maps/routing_v2/gapic_metadata.json b/packages/google-maps-routing/google/maps/routing_v2/gapic_metadata.json index 90d6d771cb0d..8382cea1d39a 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/gapic_metadata.json +++ b/packages/google-maps-routing/google/maps/routing_v2/gapic_metadata.json @@ -36,6 +36,21 @@ ] } } + }, + "rest": { + "libraryClient": "RoutesClient", + "rpcs": { + "ComputeRouteMatrix": { + "methods": [ + "compute_route_matrix" + ] + }, + "ComputeRoutes": { + "methods": [ + "compute_routes" + ] + } + } } } } diff --git a/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py b/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py index aa80f74315ce..d27a8e0e057a 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py +++ b/packages/google-maps-routing/google/maps/routing_v2/gapic_version.py @@ -14,4 +14,4 @@ # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.5.1" # {x-release-please-version} diff --git a/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py b/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py index a026c56d4229..c27576e1df92 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py +++ b/packages/google-maps-routing/google/maps/routing_v2/services/routes/async_client.py @@ -47,7 +47,12 @@ from google.protobuf import duration_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore -from google.maps.routing_v2.types import fallback_info, route, routes_service +from google.maps.routing_v2.types import ( + fallback_info, + geocoding_results, + route, + routes_service, +) from .client import RoutesClient from .transports.base import DEFAULT_CLIENT_INFO, RoutesTransport diff --git a/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py b/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py index 7822fa3b55fe..8d4a09c031e3 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py +++ b/packages/google-maps-routing/google/maps/routing_v2/services/routes/client.py @@ -50,11 +50,17 @@ from google.protobuf import duration_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore -from google.maps.routing_v2.types import fallback_info, route, routes_service +from google.maps.routing_v2.types import ( + fallback_info, + geocoding_results, + route, + routes_service, +) from .transports.base import DEFAULT_CLIENT_INFO, RoutesTransport from .transports.grpc import RoutesGrpcTransport from .transports.grpc_asyncio import RoutesGrpcAsyncIOTransport +from .transports.rest import RoutesRestTransport class 
RoutesClientMeta(type): @@ -68,6 +74,7 @@ class RoutesClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[RoutesTransport]] _transport_registry["grpc"] = RoutesGrpcTransport _transport_registry["grpc_asyncio"] = RoutesGrpcAsyncIOTransport + _transport_registry["rest"] = RoutesRestTransport def get_transport_class( cls, diff --git a/packages/google-maps-routing/google/maps/routing_v2/services/routes/transports/__init__.py b/packages/google-maps-routing/google/maps/routing_v2/services/routes/transports/__init__.py index fb3ce61ac451..f53b86efbd51 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/services/routes/transports/__init__.py +++ b/packages/google-maps-routing/google/maps/routing_v2/services/routes/transports/__init__.py @@ -19,14 +19,18 @@ from .base import RoutesTransport from .grpc import RoutesGrpcTransport from .grpc_asyncio import RoutesGrpcAsyncIOTransport +from .rest import RoutesRestInterceptor, RoutesRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[RoutesTransport]] _transport_registry["grpc"] = RoutesGrpcTransport _transport_registry["grpc_asyncio"] = RoutesGrpcAsyncIOTransport +_transport_registry["rest"] = RoutesRestTransport __all__ = ( "RoutesTransport", "RoutesGrpcTransport", "RoutesGrpcAsyncIOTransport", + "RoutesRestTransport", + "RoutesRestInterceptor", ) diff --git a/packages/google-maps-routing/google/maps/routing_v2/services/routes/transports/rest.py b/packages/google-maps-routing/google/maps/routing_v2/services/routes/transports/rest.py new file mode 100644 index 000000000000..544ac9efa306 --- /dev/null +++ b/packages/google-maps-routing/google/maps/routing_v2/services/routes/transports/rest.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.maps.routing_v2.types import routes_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import RoutesTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RoutesRestInterceptor: + """Interceptor for Routes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RoutesRestTransport. + + .. code-block:: python + class MyCustomRoutesInterceptor(RoutesRestInterceptor): + def pre_compute_route_matrix(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_compute_route_matrix(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_compute_routes(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_compute_routes(self, response): + logging.info(f"Received response: {response}") + return response + + transport = RoutesRestTransport(interceptor=MyCustomRoutesInterceptor()) + client = RoutesClient(transport=transport) + + + """ + + def pre_compute_route_matrix( + self, + request: routes_service.ComputeRouteMatrixRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[routes_service.ComputeRouteMatrixRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for compute_route_matrix + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routes server. + """ + return request, metadata + + def post_compute_route_matrix( + self, response: rest_streaming.ResponseIterator + ) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for compute_route_matrix + + Override in a subclass to manipulate the response + after it is returned by the Routes server but before + it is returned to user code. + """ + return response + + def pre_compute_routes( + self, + request: routes_service.ComputeRoutesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[routes_service.ComputeRoutesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for compute_routes + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routes server. + """ + return request, metadata + + def post_compute_routes( + self, response: routes_service.ComputeRoutesResponse + ) -> routes_service.ComputeRoutesResponse: + """Post-rpc interceptor for compute_routes + + Override in a subclass to manipulate the response + after it is returned by the Routes server but before + it is returned to user code. + """ + return response
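These ``pre_*``/``post_*`` hooks are the extension point for cross-cutting behavior on the REST transport. A minimal sketch of wiring one in, adapted from the class docstring above (the ``logging`` setup and message format are illustrative choices, and Application Default Credentials are assumed to be configured):

```python
import logging

from google.maps import routing_v2
from google.maps.routing_v2.services.routes.transports.rest import (
    RoutesRestInterceptor,
    RoutesRestTransport,
)


class LoggingRoutesInterceptor(RoutesRestInterceptor):
    """Log each ComputeRoutes request before it is sent."""

    def pre_compute_routes(self, request, metadata):
        logging.info("ComputeRoutes request: %s", request)
        return request, metadata


# The transport owns the interceptor; the client owns the transport.
transport = RoutesRestTransport(interceptor=LoggingRoutesInterceptor())
client = routing_v2.RoutesClient(transport=transport)
```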
+ """ + return request, metadata + + def post_compute_routes( + self, response: routes_service.ComputeRoutesResponse + ) -> routes_service.ComputeRoutesResponse: + """Post-rpc interceptor for compute_routes + + Override in a subclass to manipulate the response + after it is returned by the Routes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RoutesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RoutesRestInterceptor + + +class RoutesRestTransport(RoutesTransport): + """REST backend transport for Routes. + + The Routes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "routes.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RoutesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ + class _ComputeRouteMatrix(RoutesRestStub): + def __hash__(self): + return hash("ComputeRouteMatrix") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: routes_service.ComputeRouteMatrixRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + r"""Call the compute route matrix method over HTTP. + + Args: + request (~.routes_service.ComputeRouteMatrixRequest): + The request object. ComputeRouteMatrix request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.routes_service.RouteMatrixElement: + Encapsulates route information + computed for an origin/destination pair + in the ComputeRouteMatrix API. This + proto can be streamed to the client. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/distanceMatrix/v2:computeRouteMatrix", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_compute_route_matrix( + request, metadata + ) + pb_request = routes_service.ComputeRouteMatrixRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + )
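At the wire level the stub above reduces to a single JSON ``POST``. For orientation, a rough ``requests``-only equivalent (the request field names, the API-key header, and the ``X-Goog-FieldMask`` value are assumptions about the public Routes API, not taken from this diff; the generated transport authenticates through ``AuthorizedSession`` instead):

```python
import requests

body = {
    "origins": [
        {"waypoint": {"location": {"latLng": {"latitude": 37.42, "longitude": -122.08}}}}
    ],
    "destinations": [
        {"waypoint": {"location": {"latLng": {"latitude": 37.45, "longitude": -122.17}}}}
    ],
}
response = requests.post(
    "https://routes.googleapis.com/distanceMatrix/v2:computeRouteMatrix",
    params={"$alt": "json;enum-encoding=int"},
    json=body,
    headers={
        "X-Goog-Api-Key": "YOUR_API_KEY",  # assumed auth mechanism, for illustration only
        "X-Goog-FieldMask": "originIndex,destinationIndex,duration,distanceMeters",
    },
)
response.raise_for_status()  # the generated stub raises GoogleAPICallError subclasses instead
```

+ + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.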
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator( + response, routes_service.RouteMatrixElement + ) + resp = self._interceptor.post_compute_route_matrix(resp) + return resp + + class _ComputeRoutes(RoutesRestStub): + def __hash__(self): + return hash("ComputeRoutes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: routes_service.ComputeRoutesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> routes_service.ComputeRoutesResponse: + r"""Call the compute routes method over HTTP. + + Args: + request (~.routes_service.ComputeRoutesRequest): + The request object. ComputeRoutes request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.routes_service.ComputeRoutesResponse: + ComputeRoutes the response message. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/directions/v2:computeRoutes", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_compute_routes(request, metadata) + pb_request = routes_service.ComputeRoutesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = routes_service.ComputeRoutesResponse() + pb_resp = routes_service.ComputeRoutesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_compute_routes(resp) + return resp + + @property + def compute_route_matrix( + self, + ) -> Callable[ + [routes_service.ComputeRouteMatrixRequest], routes_service.RouteMatrixElement + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ComputeRouteMatrix(self._session, self._host, self._interceptor) # type: ignore + + @property + def compute_routes( + self, + ) -> Callable[ + [routes_service.ComputeRoutesRequest], routes_service.ComputeRoutesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ComputeRoutes(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RoutesRestTransport",) diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/__init__.py b/packages/google-maps-routing/google/maps/routing_v2/types/__init__.py index 9efaa71283c5..0fc81931dc68 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/__init__.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/__init__.py @@ -14,6 +14,7 @@ # limitations under the License. # from .fallback_info import FallbackInfo, FallbackReason, FallbackRoutingMode +from .geocoding_results import GeocodedWaypoint, GeocodingResults from .location import Location from .navigation_instruction import NavigationInstruction from .polyline import Polyline, PolylineEncoding, PolylineQuality @@ -44,6 +45,8 @@ "FallbackInfo", "FallbackReason", "FallbackRoutingMode", + "GeocodedWaypoint", + "GeocodingResults", "Location", "Maneuver", "NavigationInstruction", diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/fallback_info.py b/packages/google-maps-routing/google/maps/routing_v2/types/fallback_info.py index 03793b8124fb..428909da9cff 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/fallback_info.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/fallback_info.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -56,11 +58,13 @@ class FallbackRoutingMode(proto.Enum): FALLBACK_ROUTING_MODE_UNSPECIFIED (0): Not used. FALLBACK_TRAFFIC_UNAWARE (1): - Indicates the "TRAFFIC_UNAWARE" routing mode was used to + Indicates the ``TRAFFIC_UNAWARE`` + [google.maps.routing.v2.RoutingPreference] was used to compute the response. FALLBACK_TRAFFIC_AWARE (2): - Indicates the "TRAFFIC_AWARE" routing mode was used to - compute the response. + Indicates the ``TRAFFIC_AWARE`` + [RoutingPreference][google.maps.routing.v2.RoutingPreference] + was used to compute the response. """ FALLBACK_ROUTING_MODE_UNSPECIFIED = 0 FALLBACK_TRAFFIC_UNAWARE = 1 diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/geocoding_results.py b/packages/google-maps-routing/google/maps/routing_v2/types/geocoding_results.py new file mode 100644 index 000000000000..3aadb7121661 --- /dev/null +++ b/packages/google-maps-routing/google/maps/routing_v2/types/geocoding_results.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.maps.routing.v2", + manifest={ + "GeocodingResults", + "GeocodedWaypoint", + }, +) + + +class GeocodingResults(proto.Message): + r"""Contains + [GeocodedWaypoints][google.maps.routing.v2.GeocodedWaypoint] for + origin, destination and intermediate waypoints. Only populated for + address waypoints. + + Attributes: + origin (google.maps.routing_v2.types.GeocodedWaypoint): + Origin geocoded waypoint. + destination (google.maps.routing_v2.types.GeocodedWaypoint): + Destination geocoded waypoint. + intermediates (MutableSequence[google.maps.routing_v2.types.GeocodedWaypoint]): + A list of intermediate geocoded waypoints + each containing an index field that corresponds + to the zero-based position of the waypoint in + the order they were specified in the request. + """ + + origin: "GeocodedWaypoint" = proto.Field( + proto.MESSAGE, + number=1, + message="GeocodedWaypoint", + ) + destination: "GeocodedWaypoint" = proto.Field( + proto.MESSAGE, + number=2, + message="GeocodedWaypoint", + ) + intermediates: MutableSequence["GeocodedWaypoint"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="GeocodedWaypoint", + ) + + +class GeocodedWaypoint(proto.Message): + r"""Details about the locations used as waypoints. Only populated + for address waypoints. Includes details about the geocoding + results for the purposes of determining what the address was + geocoded to. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + geocoder_status (google.rpc.status_pb2.Status): + Indicates the status code resulting from the + geocoding operation. + intermediate_waypoint_request_index (int): + The index of the corresponding intermediate + waypoint in the request. Only populated if the + corresponding waypoint is an intermediate + waypoint. + + This field is a member of `oneof`_ ``_intermediate_waypoint_request_index``. + type_ (MutableSequence[str]): + The type(s) of the result, in the form of + zero or more type tags. Supported types: + https://developers.google.com/maps/documentation/geocoding/requests-geocoding#Types + partial_match (bool): + Indicates that the geocoder did not return an + exact match for the original request, though it + was able to match part of the requested address. + You may wish to examine the original request for + misspellings and/or an incomplete address. + place_id (str): + The place ID for this result. 
+ """ + + geocoder_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + intermediate_waypoint_request_index: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + type_: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + partial_match: bool = proto.Field( + proto.BOOL, + number=4, + ) + place_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/location.py b/packages/google-maps-routing/google/maps/routing_v2/types/location.py index 237769600274..f3803ba2507f 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/location.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/location.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import wrappers_pb2 # type: ignore @@ -40,7 +42,8 @@ class Location(proto.Message): the road to use for pickup and drop-off. Heading values can be from 0 to 360, where 0 specifies a heading of due North, 90 specifies a heading of due East, etc. You can use this - field only for ``DRIVE`` and ``TWO_WHEELER`` travel modes. + field only for ``DRIVE`` and ``TWO_WHEELER`` + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. """ lat_lng: latlng_pb2.LatLng = proto.Field( diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/maneuver.py b/packages/google-maps-routing/google/maps/routing_v2/types/maneuver.py index f34eb6fc00b0..61fdc16720db 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/maneuver.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/maneuver.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/navigation_instruction.py b/packages/google-maps-routing/google/maps/routing_v2/types/navigation_instruction.py index 5db2a88381e9..0d774dff34d7 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/navigation_instruction.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/navigation_instruction.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/polyline.py b/packages/google-maps-routing/google/maps/routing_v2/types/polyline.py index 72b670f4edcf..c4fb0b364e2f 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/polyline.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/polyline.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import struct_pb2 # type: ignore diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/route.py b/packages/google-maps-routing/google/maps/routing_v2/types/route.py index 5ce8eba29790..c1272b02a875 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/route.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/route.py @@ -13,9 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence -import google.geo.type.types +from google.geo.type.types import viewport as ggt_viewport from google.protobuf import duration_pb2 # type: ignore import proto # type: ignore @@ -52,10 +54,11 @@ class Route(proto.Message): legs (MutableSequence[google.maps.routing_v2.types.RouteLeg]): A collection of legs (path segments between waypoints) that make-up the route. Each leg corresponds to the trip between - two non-\ ``via`` Waypoints. For example, a route with no - intermediate waypoints has only one leg. A route that - includes one non-\ ``via`` intermediate waypoint has two - legs. A route that includes one ``via`` intermediate + two non-\ ``via`` + [Waypoints][google.maps.routing.v2.Waypoint]. For example, a + route with no intermediate waypoints has only one leg. A + route that includes one non-\ ``via`` intermediate waypoint + has two legs. A route that includes one ``via`` intermediate waypoint has one leg. The order of the legs matches the order of Waypoints from ``origin`` to ``intermediates`` to ``destination``. @@ -135,10 +138,10 @@ class Route(proto.Message): proto.STRING, number=7, ) - viewport: google.geo.type.types.Viewport = proto.Field( + viewport: ggt_viewport.Viewport = proto.Field( proto.MESSAGE, number=8, - message=google.geo.type.types.Viewport, + message=ggt_viewport.Viewport, ) travel_advisory: "RouteTravelAdvisory" = proto.Field( proto.MESSAGE, @@ -252,19 +255,7 @@ class RouteLegStepTravelAdvisory(proto.Message): Attributes: speed_reading_intervals (MutableSequence[google.maps.routing_v2.types.SpeedReadingInterval]): - Speed reading intervals detailing traffic density. - Applicable in case of ``TRAFFIC_AWARE`` and - ``TRAFFIC_AWARE_OPTIMAL`` routing preferences. The intervals - cover the entire polyline of the RouteLegStep without - overlap. The start point of a specified interval is the same - as the end point of the preceding interval. - - Example: - - :: - - polyline: A ---- B ---- C ---- D ---- E ---- F ---- G - speed_reading_intervals: [A,C), [C,D), [D,G). + NOTE: This field is not currently populated. """ speed_reading_intervals: MutableSequence[ @@ -359,7 +350,8 @@ class RouteLeg(proto.Message): class RouteLegStep(proto.Message): - r"""Encapsulates a segment of a ``RouteLeg``. A step corresponds to a + r"""Encapsulates a segment of a + [RouteLeg][google.maps.routing.v2.RouteLeg]. A step corresponds to a single navigation instruction. Route legs are made up of steps. 
Attributes: diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/route_label.py b/packages/google-maps-routing/google/maps/routing_v2/types/route_label.py index 8ed18e92ca21..21f0b175bd31 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/route_label.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/route_label.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -26,8 +28,9 @@ class RouteLabel(proto.Enum): - r"""Labels for the ``Route`` that are useful to identify specific - properties of the route to compare against others. + r"""Labels for the [Route][google.maps.routing.v2.Route] that are useful + to identify specific properties of the route to compare against + others. Values: ROUTE_LABEL_UNSPECIFIED (0): @@ -38,8 +41,8 @@ class RouteLabel(proto.Enum): DEFAULT_ROUTE_ALTERNATE (2): An alternative to the default "best" route. Routes like this will be returned when - ``ComputeRoutesRequest.compute_alternative_routes`` is - specified. + [compute_alternative_routes][google.maps.routing.v2.ComputeRoutesRequest.compute_alternative_routes] + is specified. FUEL_EFFICIENT (3): Fuel efficient route. Routes labeled with this value are determined to be optimized for diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/route_modifiers.py b/packages/google-maps-routing/google/maps/routing_v2/types/route_modifiers.py index daf463946c1a..8d3c62be2560 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/route_modifiers.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/route_modifiers.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -37,22 +39,22 @@ class RouteModifiers(proto.Message): Specifies whether to avoid toll roads where reasonable. Preference will be given to routes not containing toll roads. Applies only to the ``DRIVE`` and ``TWO_WHEELER`` - travel modes. + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. avoid_highways (bool): Specifies whether to avoid highways where reasonable. Preference will be given to routes not containing highways. - Applies only to the ``DRIVE`` and ``TWO_WHEELER`` travel - modes. + Applies only to the ``DRIVE`` and ``TWO_WHEELER`` + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. avoid_ferries (bool): Specifies whether to avoid ferries where reasonable. Preference will be given to routes not containing travel by ferries. Applies only to the ``DRIVE`` and\ ``TWO_WHEELER`` - travel modes. + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. avoid_indoor (bool): Specifies whether to avoid navigating indoors where reasonable. Preference will be given to routes not containing indoor navigation. Applies only to the ``WALK`` - travel mode. + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. vehicle_info (google.maps.routing_v2.types.VehicleInfo): Specifies the vehicle information. toll_passes (MutableSequence[google.maps.routing_v2.types.TollPass]): @@ -60,7 +62,8 @@ class RouteModifiers(proto.Message): are provided, the API tries to return the pass price. 
If toll passes are not provided, the API treats the toll pass as unknown and tries to return the cash price. Applies only - to the DRIVE and TWO_WHEELER travel modes. + to the ``DRIVE`` and ``TWO_WHEELER`` + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. """ avoid_tolls: bool = proto.Field( diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/route_travel_mode.py b/packages/google-maps-routing/google/maps/routing_v2/types/route_travel_mode.py index 6c5f4c096e8f..9a4109a9e795 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/route_travel_mode.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/route_travel_mode.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -26,11 +28,11 @@ class RouteTravelMode(proto.Enum): - r"""A set of values used to specify the mode of travel. NOTE: WALK, - BICYCLE, and TWO_WHEELER routes are in beta and might sometimes be - missing clear sidewalks, pedestrian paths, or bicycling paths. You - must display this warning to the user for all walking, bicycling, - and two-wheel routes that you display in your app. + r"""A set of values used to specify the mode of travel. NOTE: ``WALK``, + ``BICYCLE``, and ``TWO_WHEELER`` routes are in beta and might + sometimes be missing clear sidewalks, pedestrian paths, or bicycling + paths. You must display this warning to the user for all walking, + bicycling, and two-wheel routes that you display in your app. Values: TRAVEL_MODE_UNSPECIFIED (0): diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/routes_service.py b/packages/google-maps-routing/google/maps/routing_v2/types/routes_service.py index 25266a8d5107..81f18bab8136 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/routes_service.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/routes_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import duration_pb2 # type: ignore @@ -20,6 +22,7 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore +from google.maps.routing_v2.types import geocoding_results as gmr_geocoding_results from google.maps.routing_v2.types import routing_preference as gmr_routing_preference from google.maps.routing_v2.types import fallback_info as gmr_fallback_info from google.maps.routing_v2.types import polyline, route @@ -98,7 +101,9 @@ class ComputeRoutesRequest(proto.Message): the request fails. compute_alternative_routes (bool): Optional. Specifies whether to calculate - alternate routes in addition to the route. + alternate routes in addition to the route. No + alternative routes are returned for requests + that have intermediate waypoints. route_modifiers (google.maps.routing_v2.types.RouteModifiers): Optional. A set of conditions to satisfy that affect the way routes are calculated. @@ -111,19 +116,24 @@ class ComputeRoutesRequest(proto.Message): for the list of supported languages. When you don't provide this value, the display language is inferred from the location of the route request. + region_code (str): + Optional. The region code, specified as a ccTLD ("top-level + domain") two-character value. 
For more information see + https://en.wikipedia.org/wiki/List_of_Internet_top-level_domains#Country_code_top-level_domains units (google.maps.routing_v2.types.Units): Optional. Specifies the units of measure for the display fields. This includes the ``instruction`` field in - ``NavigationInstruction``. The units of measure used for the - route, leg, step distance, and duration are not affected by - this value. If you don't provide this value, then the - display units are inferred from the location of the request. + [NavigationInstruction][google.maps.routing.v2.NavigationInstruction]. + The units of measure used for the route, leg, step distance, + and duration are not affected by this value. If you don't + provide this value, then the display units are inferred from + the location of the request. requested_reference_routes (MutableSequence[google.maps.routing_v2.types.ComputeRoutesRequest.ReferenceRoute]): Optional. Specifies what reference routes to calculate as part of the request in addition to the default route. A reference route is a route with a different route - calculation objective than the default route. For example an - FUEL_EFFICIENT reference route calculation takes into + calculation objective than the default route. For example a + ``FUEL_EFFICIENT`` reference route calculation takes into account various parameters that would generate an optimal fuel efficient route. extra_computations (MutableSequence[google.maps.routing_v2.types.ComputeRoutesRequest.ExtraComputation]): @@ -222,6 +232,10 @@ class ExtraComputation(proto.Enum): proto.STRING, number=10, ) + region_code: str = proto.Field( + proto.STRING, + number=16, + ) units: gmr_units.Units = proto.Field( proto.ENUM, number=11, @@ -257,6 +271,9 @@ class ComputeRoutesResponse(proto.Message): is used, this field contains detailed info about the fallback response. Otherwise this field is unset. + geocoding_results (google.maps.routing_v2.types.GeocodingResults): + Contains geocoding response info for + waypoints specified as addresses. """ routes: MutableSequence[route.Route] = proto.RepeatedField( @@ -269,6 +286,11 @@ class ComputeRoutesResponse(proto.Message): number=2, message=gmr_fallback_info.FallbackInfo, ) + geocoding_results: gmr_geocoding_results.GeocodingResults = proto.Field( + proto.MESSAGE, + number=3, + message=gmr_geocoding_results.GeocodingResults, + ) class ComputeRouteMatrixRequest(proto.Message): @@ -306,6 +328,19 @@ class ComputeRouteMatrixRequest(proto.Message): you made the request. If you set this value to a time that has already occurred, the request fails. + language_code (str): + Optional. The BCP-47 language code, such as "en-US" or + "sr-Latn". For more information, see + http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + See `Language + Support `__ + for the list of supported languages. When you don't provide + this value, the display language is inferred from the + location of the first origin. + region_code (str): + Optional. The region code, specified as a ccTLD ("top-level + domain") two-character value. For more information see + https://en.wikipedia.org/wiki/List_of_Internet_top-level_domains#Country_code_top-level_domains extra_computations (MutableSequence[google.maps.routing_v2.types.ComputeRouteMatrixRequest.ExtraComputation]): Optional. A list of extra computations which may be used to complete the request. 
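A sketch of the matrix call with the new request fields, again not taken from the patch: it assumes the `RouteMatrixOrigin`/`RouteMatrixDestination` wrappers and the streaming `compute_route_matrix` method of the generated surface, plus Application Default Credentials. The coordinates and street address are made up, and the `x-goog-fieldmask` metadata reflects the Routes API's response-field-mask convention.

```python
from google.maps import routing_v2
from google.type import latlng_pb2

client = routing_v2.RoutesClient()  # assumes Application Default Credentials
request = routing_v2.ComputeRouteMatrixRequest(
    origins=[
        routing_v2.RouteMatrixOrigin(
            waypoint=routing_v2.Waypoint(
                location=routing_v2.Location(
                    lat_lng=latlng_pb2.LatLng(latitude=37.42, longitude=-122.08)
                )
            )
        )
    ],
    destinations=[
        routing_v2.RouteMatrixDestination(
            # `address` is the new Waypoint oneof member added in this patch;
            # the street address here is purely illustrative.
            waypoint=routing_v2.Waypoint(address="1600 Example Parkway, Mountain View, CA")
        )
    ],
    language_code="en-US",  # new in this patch (field 6)
    region_code="us",       # new in this patch (field 9)
)
# The service streams one RouteMatrixElement per origin/destination pair.
for element in client.compute_route_matrix(
    request=request,
    metadata=[("x-goog-fieldmask", "originIndex,destinationIndex,duration")],
):
    print(element.origin_index, element.destination_index, element.duration)
```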
Note: These @@ -353,6 +388,14 @@ class ExtraComputation(proto.Enum): number=5, message=timestamp_pb2.Timestamp, ) + language_code: str = proto.Field( + proto.STRING, + number=6, + ) + region_code: str = proto.Field( + proto.STRING, + number=9, + ) extra_computations: MutableSequence[ExtraComputation] = proto.RepeatedField( proto.ENUM, number=8, @@ -426,11 +469,13 @@ class RouteMatrixElement(proto.Message): The travel distance of the route, in meters. duration (google.protobuf.duration_pb2.Duration): The length of time needed to navigate the route. If you set - the ``routing_preference`` to ``TRAFFIC_UNAWARE``, then this - value is the same as ``static_duration``. If you set the - ``routing_preference`` to either ``TRAFFIC_AWARE`` or - ``TRAFFIC_AWARE_OPTIMAL``, then this value is calculated - taking traffic conditions into account. + the + [routing_preference][google.maps.routing.v2.ComputeRouteMatrixRequest.routing_preference] + to ``TRAFFIC_UNAWARE``, then this value is the same as + ``static_duration``. If you set the ``routing_preference`` + to either ``TRAFFIC_AWARE`` or ``TRAFFIC_AWARE_OPTIMAL``, + then this value is calculated taking traffic conditions into + account. static_duration (google.protobuf.duration_pb2.Duration): The duration of traveling through the route without taking traffic conditions into diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/routing_preference.py b/packages/google-maps-routing/google/maps/routing_v2/types/routing_preference.py index 10cc4f54f32d..26fb06d13c32 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/routing_preference.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/routing_preference.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -37,13 +39,15 @@ class RoutingPreference(proto.Enum): Computes routes without taking live traffic conditions into consideration. Suitable when traffic conditions don't matter or are not applicable. Using this value produces the lowest - latency. Note: For ``RouteTravelMode`` DRIVE and TWO_WHEELER - choice of route and duration are based on road network and - average time-independent traffic conditions. Results for a - given request may vary over time due to changes in the road - network, updated average traffic conditions, and the - distributed nature of the service. Results may also vary - between nearly-equivalent routes at any time or frequency. + latency. Note: For + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode] + ``DRIVE`` and ``TWO_WHEELER`` choice of route and duration + are based on road network and average time-independent + traffic conditions. Results for a given request may vary + over time due to changes in the road network, updated + average traffic conditions, and the distributed nature of + the service. Results may also vary between nearly-equivalent + routes at any time or frequency. TRAFFIC_AWARE (2): Calculates routes taking live traffic conditions into consideration. 
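One more orientation sketch, not from the patch, tying together the `requested_reference_routes` and `routing_preference` fields documented above; it assumes the nested `ReferenceRoute` enum on `ComputeRoutesRequest`, and the origin/destination strings are placeholders.

```python
from google.maps import routing_v2

# Ask for a FUEL_EFFICIENT reference route in addition to the default
# route; TRAFFIC_AWARE_OPTIMAL is used here as the traffic-aware option.
request = routing_v2.ComputeRoutesRequest(
    origin=routing_v2.Waypoint(address="Example origin address"),          # hypothetical
    destination=routing_v2.Waypoint(address="Example destination address"),  # hypothetical
    travel_mode=routing_v2.RouteTravelMode.DRIVE,
    routing_preference=routing_v2.RoutingPreference.TRAFFIC_AWARE_OPTIMAL,
    requested_reference_routes=[
        routing_v2.ComputeRoutesRequest.ReferenceRoute.FUEL_EFFICIENT
    ],
)
```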
In contrast to ``TRAFFIC_AWARE_OPTIMAL``, diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/speed_reading_interval.py b/packages/google-maps-routing/google/maps/routing_v2/types/speed_reading_interval.py index 52a828b850ae..238811a3522f 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/speed_reading_interval.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/speed_reading_interval.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -47,6 +49,8 @@ class SpeedReadingInterval(proto.Message): This field is a member of `oneof`_ ``_end_polyline_point_index``. speed (google.maps.routing_v2.types.SpeedReadingInterval.Speed): Traffic speed in this interval. + + This field is a member of `oneof`_ ``speed_type``. """ class Speed(proto.Enum): @@ -80,6 +84,7 @@ class Speed(proto.Enum): speed: Speed = proto.Field( proto.ENUM, number=3, + oneof="speed_type", enum=Speed, ) diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/toll_info.py b/packages/google-maps-routing/google/maps/routing_v2/types/toll_info.py index 052e1f7641d2..901f8ea51f8a 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/toll_info.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/toll_info.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.type import money_pb2 # type: ignore @@ -27,19 +29,20 @@ class TollInfo(proto.Message): - r"""Encapsulates toll information on a ``Route`` or on a ``RouteLeg``. + r"""Encapsulates toll information on a + [Route][google.maps.routing.v2.Route] or on a + [RouteLeg][google.maps.routing.v2.RouteLeg]. Attributes: estimated_price (MutableSequence[google.type.money_pb2.Money]): - The monetary amount of tolls for the - corresponding Route or RouteLeg. This list - contains a money amount for each currency that - is expected to be charged by the toll stations. - Typically this list will contain only one item - for routes with tolls in one currency. For - international trips, this list may contain - multiple items to reflect tolls in different - currencies. + The monetary amount of tolls for the corresponding + [Route][google.maps.routing.v2.Route] or + [RouteLeg][google.maps.routing.v2.RouteLeg]. This list + contains a money amount for each currency that is expected + to be charged by the toll stations. Typically this list will + contain only one item for routes with tolls in one currency. + For international trips, this list may contain multiple + items to reflect tolls in different currencies. """ estimated_price: MutableSequence[money_pb2.Money] = proto.RepeatedField( diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py b/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py index 96d6fcf8f95c..6ca198eb924c 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/toll_passes.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
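A brief aside on the `SpeedReadingInterval` change above (not part of the patch): since `speed` now belongs to the `speed_type` oneof, its presence can be checked by name on the raw protobuf, via the same `.pb()` accessor the regenerated tests in this patch use. The `TRAFFIC_JAM` value is assumed to be a member of the `Speed` enum.

```python
from google.maps import routing_v2

interval = routing_v2.SpeedReadingInterval(
    speed=routing_v2.SpeedReadingInterval.Speed.TRAFFIC_JAM  # assumed enum value
)
# WhichOneof reports which member of the oneof is set, if any.
assert routing_v2.SpeedReadingInterval.pb(interval).WhichOneof("speed_type") == "speed"
```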
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -175,6 +177,8 @@ class TollPass(proto.Enum): ME, USA. US_MI_AMBASSADOR_BRIDGE_PREMIER_COMMUTER_CARD (36): MI, USA. + US_MI_BCPASS (94): + MI, USA. US_MI_GROSSE_ILE_TOLL_BRIDGE_PASS_TAG (37): MI, USA. US_MI_IQ_PROX_CARD (38): @@ -304,6 +308,7 @@ class TollPass(proto.Enum): US_MD_EZPASSMD = 34 US_ME_EZPASSME = 35 US_MI_AMBASSADOR_BRIDGE_PREMIER_COMMUTER_CARD = 36 + US_MI_BCPASS = 94 US_MI_GROSSE_ILE_TOLL_BRIDGE_PASS_TAG = 37 US_MI_IQ_PROX_CARD = 38 US_MI_MACKINAC_BRIDGE_MAC_PASS = 39 diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/units.py b/packages/google-maps-routing/google/maps/routing_v2/types/units.py index 1fbbd1a0c8cd..95737a261a12 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/units.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/units.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_emission_type.py b/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_emission_type.py index 762aff83aa2d..0df563e53a46 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_emission_type.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_emission_type.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -26,13 +28,13 @@ class VehicleEmissionType(proto.Enum): - r"""A set of values describing the vehicle's emission type. - Applies only to the DRIVE travel mode. + r"""A set of values describing the vehicle's emission type. Applies only + to the ``DRIVE`` + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. Values: VEHICLE_EMISSION_TYPE_UNSPECIFIED (0): - No emission type specified. Default to - GASOLINE. + No emission type specified. Default to ``GASOLINE``. GASOLINE (1): Gasoline/petrol fueled vehicle. ELECTRIC (2): diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_info.py b/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_info.py index e2802bad3a3c..4005a547d2eb 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_info.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/vehicle_info.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -33,8 +35,8 @@ class VehicleInfo(proto.Message): Attributes: emission_type (google.maps.routing_v2.types.VehicleEmissionType): - Describes the vehicle's emission type. - Applies only to the DRIVE travel mode. + Describes the vehicle's emission type. Applies only to the + ``DRIVE`` travel mode. 
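For orientation only, a minimal sketch combining `VehicleInfo` and `VehicleEmissionType` as the docs above describe — emission type applies only to the `DRIVE` travel mode; the sketch is not code from the patch.

```python
from google.maps import routing_v2

modifiers = routing_v2.RouteModifiers(
    vehicle_info=routing_v2.VehicleInfo(
        emission_type=routing_v2.VehicleEmissionType.ELECTRIC  # DRIVE mode only
    )
)
```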
""" emission_type: vehicle_emission_type.VehicleEmissionType = proto.Field( diff --git a/packages/google-maps-routing/google/maps/routing_v2/types/waypoint.py b/packages/google-maps-routing/google/maps/routing_v2/types/waypoint.py index b3690e2392bb..0b9160d1c2e0 100644 --- a/packages/google-maps-routing/google/maps/routing_v2/types/waypoint.py +++ b/packages/google-maps-routing/google/maps/routing_v2/types/waypoint.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore @@ -49,12 +51,18 @@ class Waypoint(proto.Message): The POI Place ID associated with the waypoint. + This field is a member of `oneof`_ ``location_type``. + address (str): + Human readable address or a plus code. + See https://plus.codes for details. + This field is a member of `oneof`_ ``location_type``. via (bool): Marks this waypoint as a milestone rather a stopping point. For each non-via waypoint in the request, the response - appends an entry to the ``legs`` array to provide the - details for stopovers on that leg of the trip. Set this + appends an entry to the + [legs][google.maps.routing.v2.Route.legs] array to provide + the details for stopovers on that leg of the trip. Set this value to true when you want the route to pass through this waypoint without stopping over. Via waypoints don't cause an entry to be added to the ``legs`` array, but they do route @@ -71,7 +79,8 @@ class Waypoint(proto.Message): non-\ ``via`` waypoints on roads that are unsuitable for pickup and drop-off. This option works only for ``DRIVE`` and ``TWO_WHEELER`` travel modes, and when the - ``location_type`` is ``location``. + ``location_type`` is + [Location][google.maps.routing.v2.Location]. side_of_road (bool): Indicates that the location of this waypoint is meant to have a preference for the vehicle to stop at a particular @@ -79,7 +88,8 @@ class Waypoint(proto.Message): through the location so that the vehicle can stop at the side of road that the location is biased towards from the center of the road. This option works only for 'DRIVE' and - 'TWO_WHEELER' travel modes. + 'TWO_WHEELER' + [RouteTravelMode][google.maps.routing.v2.RouteTravelMode]. """ location: gmr_location.Location = proto.Field( @@ -93,6 +103,11 @@ class Waypoint(proto.Message): number=2, oneof="location_type", ) + address: str = proto.Field( + proto.STRING, + number=7, + oneof="location_type", + ) via: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-maps-routing/noxfile.py b/packages/google-maps-routing/noxfile.py index fcacc5d528dd..1749edb5dad7 100644 --- a/packages/google-maps-routing/noxfile.py +++ b/packages/google-maps-routing/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -378,8 +378,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json b/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json index ccd2a54789eb..f95dfceaada8 100644 --- a/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json +++ b/packages/google-maps-routing/samples/generated_samples/snippet_metadata_google.maps.routing.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-routing", - "version": "0.3.1" + "version": "0.5.1" }, "snippets": [ { diff --git a/packages/google-maps-routing/scripts/fixup_routing_v2_keywords.py b/packages/google-maps-routing/scripts/fixup_routing_v2_keywords.py index 291c4ea93db5..ab8c9f6352a2 100644 --- a/packages/google-maps-routing/scripts/fixup_routing_v2_keywords.py +++ b/packages/google-maps-routing/scripts/fixup_routing_v2_keywords.py @@ -39,8 +39,8 @@ def partition( class routingCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'compute_route_matrix': ('origins', 'destinations', 'travel_mode', 'routing_preference', 'departure_time', 'extra_computations', ), - 'compute_routes': ('origin', 'destination', 'intermediates', 'travel_mode', 'routing_preference', 'polyline_quality', 'polyline_encoding', 'departure_time', 'compute_alternative_routes', 'route_modifiers', 'language_code', 'units', 'requested_reference_routes', 'extra_computations', ), + 'compute_route_matrix': ('origins', 'destinations', 'travel_mode', 'routing_preference', 'departure_time', 'language_code', 'region_code', 'extra_computations', ), + 'compute_routes': ('origin', 'destination', 'intermediates', 'travel_mode', 'routing_preference', 'polyline_quality', 'polyline_encoding', 'departure_time', 'compute_alternative_routes', 'route_modifiers', 'language_code', 'region_code', 'units', 'requested_reference_routes', 'extra_computations', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-maps-routing/setup.py b/packages/google-maps-routing/setup.py index e3d5415b7c2d..80774eef6672 100644 --- a/packages/google-maps-routing/setup.py +++ b/packages/google-maps-routing/setup.py @@ -40,7 +40,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-geo-type<1.0.0dev", + "google-geo-type >= 0.1.0, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python" @@ -56,9 +56,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.maps"] setuptools.setup( name=name, diff --git a/packages/google-maps-routing/testing/constraints-3.10.txt b/packages/google-maps-routing/testing/constraints-3.10.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-routing/testing/constraints-3.10.txt +++ b/packages/google-maps-routing/testing/constraints-3.10.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-routing/testing/constraints-3.11.txt 
b/packages/google-maps-routing/testing/constraints-3.11.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-routing/testing/constraints-3.11.txt +++ b/packages/google-maps-routing/testing/constraints-3.11.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-routing/testing/constraints-3.12.txt b/packages/google-maps-routing/testing/constraints-3.12.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-routing/testing/constraints-3.12.txt +++ b/packages/google-maps-routing/testing/constraints-3.12.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-routing/testing/constraints-3.8.txt b/packages/google-maps-routing/testing/constraints-3.8.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-routing/testing/constraints-3.8.txt +++ b/packages/google-maps-routing/testing/constraints-3.8.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-routing/testing/constraints-3.9.txt b/packages/google-maps-routing/testing/constraints-3.9.txt index ed7f9aed2559..2214a366a259 100644 --- a/packages/google-maps-routing/testing/constraints-3.9.txt +++ b/packages/google-maps-routing/testing/constraints-3.9.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +google-geo-type diff --git a/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py b/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py index a64b4553d0da..068ec592948f 100644 --- a/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py +++ b/packages/google-maps-routing/tests/unit/gapic/routing_v2/test_routes.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -32,6 +34,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -41,6 +44,8 @@ from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.maps.routing_v2.services.routes import ( RoutesAsyncClient, @@ -49,6 +54,7 @@ ) from google.maps.routing_v2.types import ( fallback_info, + geocoding_results, location, polyline, route, @@ -107,6 +113,7 @@ def test__get_default_mtls_endpoint(): [ (RoutesClient, "grpc"), (RoutesAsyncClient, "grpc_asyncio"), + (RoutesClient, "rest"), ], ) def test_routes_client_from_service_account_info(client_class, transport_name): @@ -120,7 +127,11 @@ def test_routes_client_from_service_account_info(client_class, transport_name): assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("routes.googleapis.com:443") + assert client.transport._host == ( + "routes.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://routes.googleapis.com" + ) @pytest.mark.parametrize( @@ -128,6 +139,7 @@ def test_routes_client_from_service_account_info(client_class, transport_name): [ 
(transports.RoutesGrpcTransport, "grpc"), (transports.RoutesGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RoutesRestTransport, "rest"), ], ) def test_routes_client_service_account_always_use_jwt(transport_class, transport_name): @@ -151,6 +163,7 @@ def test_routes_client_service_account_always_use_jwt(transport_class, transport [ (RoutesClient, "grpc"), (RoutesAsyncClient, "grpc_asyncio"), + (RoutesClient, "rest"), ], ) def test_routes_client_from_service_account_file(client_class, transport_name): @@ -171,13 +184,18 @@ def test_routes_client_from_service_account_file(client_class, transport_name): assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("routes.googleapis.com:443") + assert client.transport._host == ( + "routes.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://routes.googleapis.com" + ) def test_routes_client_get_transport_class(): transport = RoutesClient.get_transport_class() available_transports = [ transports.RoutesGrpcTransport, + transports.RoutesRestTransport, ] assert transport in available_transports @@ -190,6 +208,7 @@ def test_routes_client_get_transport_class(): [ (RoutesClient, transports.RoutesGrpcTransport, "grpc"), (RoutesAsyncClient, transports.RoutesGrpcAsyncIOTransport, "grpc_asyncio"), + (RoutesClient, transports.RoutesRestTransport, "rest"), ], ) @mock.patch.object( @@ -329,6 +348,8 @@ def test_routes_client_client_options(client_class, transport_class, transport_n "grpc_asyncio", "false", ), + (RoutesClient, transports.RoutesRestTransport, "rest", "true"), + (RoutesClient, transports.RoutesRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -514,6 +535,7 @@ def test_routes_client_get_mtls_endpoint_and_cert_source(client_class): [ (RoutesClient, transports.RoutesGrpcTransport, "grpc"), (RoutesAsyncClient, transports.RoutesGrpcAsyncIOTransport, "grpc_asyncio"), + (RoutesClient, transports.RoutesRestTransport, "rest"), ], ) def test_routes_client_client_options_scopes( @@ -549,6 +571,7 @@ def test_routes_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (RoutesClient, transports.RoutesRestTransport, "rest", None), ], ) def test_routes_client_client_options_credentials_file( @@ -830,6 +853,448 @@ async def test_compute_route_matrix_async_from_dict(): await test_compute_route_matrix_async(request_type=dict) +@pytest.mark.parametrize( + "request_type", + [ + routes_service.ComputeRoutesRequest, + dict, + ], +) +def test_compute_routes_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = routes_service.ComputeRoutesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = routes_service.ComputeRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.compute_routes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, routes_service.ComputeRoutesResponse) + + +def test_compute_routes_rest_required_fields( + request_type=routes_service.ComputeRoutesRequest, +): + transport_class = transports.RoutesRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_routes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_routes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = routes_service.ComputeRoutesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = routes_service.ComputeRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.compute_routes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_compute_routes_rest_unset_required_fields(): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.compute_routes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "origin", + "destination", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_compute_routes_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RoutesRestInterceptor, "post_compute_routes" + ) as post, mock.patch.object( + transports.RoutesRestInterceptor, "pre_compute_routes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = routes_service.ComputeRoutesRequest.pb( + routes_service.ComputeRoutesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = routes_service.ComputeRoutesResponse.to_json( + routes_service.ComputeRoutesResponse() + ) + + request = routes_service.ComputeRoutesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = routes_service.ComputeRoutesResponse() + + client.compute_routes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_compute_routes_rest_bad_request( + transport: str = "rest", request_type=routes_service.ComputeRoutesRequest +): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.compute_routes(request) + + +def test_compute_routes_rest_error(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + routes_service.ComputeRouteMatrixRequest, + dict, + ], +) +def test_compute_route_matrix_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = routes_service.RouteMatrixElement( + origin_index=1279, + destination_index=1817, + condition=routes_service.RouteMatrixElementCondition.ROUTE_EXISTS, + distance_meters=1594, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = routes_service.RouteMatrixElement.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.compute_route_matrix(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, routes_service.RouteMatrixElement) + assert response.origin_index == 1279 + assert response.destination_index == 1817 + assert response.condition == routes_service.RouteMatrixElementCondition.ROUTE_EXISTS + assert response.distance_meters == 1594 + + +def test_compute_route_matrix_rest_required_fields( + request_type=routes_service.ComputeRouteMatrixRequest, +): + transport_class = transports.RoutesRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_route_matrix._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).compute_route_matrix._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = routes_service.RouteMatrixElement() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = routes_service.RouteMatrixElement.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.compute_route_matrix(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_compute_route_matrix_rest_unset_required_fields(): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.compute_route_matrix._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "origins", + "destinations", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_compute_route_matrix_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RoutesRestInterceptor, "post_compute_route_matrix" + ) as post, mock.patch.object( + transports.RoutesRestInterceptor, "pre_compute_route_matrix" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = routes_service.ComputeRouteMatrixRequest.pb( + routes_service.ComputeRouteMatrixRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = routes_service.RouteMatrixElement.to_json( + routes_service.RouteMatrixElement() + ) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = routes_service.ComputeRouteMatrixRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = routes_service.RouteMatrixElement() + + client.compute_route_matrix( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_compute_route_matrix_rest_bad_request( + transport: str = "rest", 
request_type=routes_service.ComputeRouteMatrixRequest +): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.compute_route_matrix(request) + + +def test_compute_route_matrix_rest_error(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.RoutesGrpcTransport( @@ -911,6 +1376,7 @@ def test_transport_get_channel(): [ transports.RoutesGrpcTransport, transports.RoutesGrpcAsyncIOTransport, + transports.RoutesRestTransport, ], ) def test_transport_adc(transport_class): @@ -925,6 +1391,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1055,6 +1522,7 @@ def test_routes_transport_auth_adc(transport_class): [ transports.RoutesGrpcTransport, transports.RoutesGrpcAsyncIOTransport, + transports.RoutesRestTransport, ], ) def test_routes_transport_auth_gdch_credentials(transport_class): @@ -1149,11 +1617,23 @@ def test_routes_grpc_transport_client_cert_source_for_mtls(transport_class): ) +def test_routes_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RoutesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_routes_host_no_port(transport_name): @@ -1164,7 +1644,11 @@ def test_routes_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("routes.googleapis.com:443") + assert client.transport._host == ( + "routes.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://routes.googleapis.com" + ) @pytest.mark.parametrize( @@ -1172,6 +1656,7 @@ def test_routes_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_routes_host_with_port(transport_name): @@ -1182,7 +1667,36 @@ def test_routes_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("routes.googleapis.com:8000") + assert client.transport._host == ( + "routes.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://routes.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_routes_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RoutesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RoutesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = 
client1.transport.compute_routes._session + session2 = client2.transport.compute_routes._session + assert session1 != session2 + session1 = client1.transport.compute_route_matrix._session + session2 = client2.transport.compute_route_matrix._session + assert session1 != session2 def test_routes_grpc_transport_channel(): @@ -1445,6 +1959,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1462,6 +1977,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/release-please-config.json b/release-please-config.json index 8307f80e6529..a0b8e5be4b7f 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -1,102 +1,335 @@ { "packages": { + "packages/google-ai-generativelanguage": { + "component": "google-ai-generativelanguage", + "extra-files": [ + "google/ai/generativelanguage/gapic_version.py", + "google/ai/generativelanguage_v1beta2/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-apps-script-type": { + "component": "google-apps-script-type", + "extra-files": [ + "google/apps/script/type/calendar/gapic_version.py", + "google/apps/script/type/docs/gapic_version.py", + "google/apps/script/type/drive/gapic_version.py", + "google/apps/script/type/gapic_version.py", + "google/apps/script/type/gmail/gapic_version.py", + "google/apps/script/type/sheets/gapic_version.py", + "google/apps/script/type/slides/gapic_version.py" + ], + "release-type": "python" + }, + "packages/google-area120-tables": { + "component": "google-area120-tables", + "extra-files": [ + "google/area120/tables/gapic_version.py", + "google/area120/tables_v1alpha1/gapic_version.py" + ], + "release-type": "python" + }, + "packages/google-cloud-advisorynotifications": { + "component": "google-cloud-advisorynotifications", + "extra-files": [ + "google/cloud/advisorynotifications/gapic_version.py", + "google/cloud/advisorynotifications_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-alloydb": { + "component": "google-cloud-alloydb", + "extra-files": [ + "google/cloud/alloydb/gapic_version.py", + "google/cloud/alloydb_v1/gapic_version.py", + "google/cloud/alloydb_v1alpha/gapic_version.py", + "google/cloud/alloydb_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json", + "type": "json" + }, + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json", + "type": "json" + }, + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-apigee-connect": { + "component": "google-cloud-apigee-connect", + "extra-files": [ + "google/cloud/apigeeconnect/gapic_version.py", + "google/cloud/apigeeconnect_v1/gapic_version.py" + ], + "release-type": "python" + }, + "packages/google-cloud-bigquery-biglake": { + "component": "google-cloud-bigquery-biglake", + 
"extra-files": [ + "google/cloud/bigquery/biglake/gapic_version.py", + "google/cloud/bigquery/biglake_v1/gapic_version.py", + "google/cloud/bigquery/biglake_v1alpha1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json", + "type": "json" + }, + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-confidentialcomputing": { + "component": "google-cloud-confidentialcomputing", + "extra-files": [ + "google/cloud/confidentialcomputing/gapic_version.py", + "google/cloud/confidentialcomputing_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-contentwarehouse": { + "component": "google-cloud-contentwarehouse", + "extra-files": [ + "google/cloud/contentwarehouse/gapic_version.py", + "google/cloud/contentwarehouse_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-datacatalog-lineage": { + "component": "google-cloud-datacatalog-lineage", + "extra-files": [ + "google/cloud/datacatalog/lineage/gapic_version.py", + "google/cloud/datacatalog/lineage_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, "packages/google-cloud-discoveryengine": { "component": "google-cloud-discoveryengine", - "release-type": "python", "extra-files": [ "google/cloud/discoveryengine/gapic_version.py", + "google/cloud/discoveryengine_v1/gapic_version.py", + "google/cloud/discoveryengine_v1beta/gapic_version.py", { - "type": "json", + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json", + "type": "json" + }, + { + "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json", - "jsonpath": "$.clientLibrary.version" + "type": "json" } - ] + ], + "release-type": "python" + }, + "packages/google-cloud-domains": { + "component": "google-cloud-domains", + "extra-files": [ + "google/cloud/domains/gapic_version.py", + "google/cloud/domains_v1/gapic_version.py", + "google/cloud/domains_v1beta1/gapic_version.py" + ], + "release-type": "python" }, "packages/google-cloud-enterpriseknowledgegraph": { "component": "google-cloud-enterpriseknowledgegraph", - "release-type": "python", "extra-files": [ "google/cloud/enterpriseknowledgegraph/gapic_version.py", + "google/cloud/enterpriseknowledgegraph_v1/gapic_version.py", { - "type": "json", + "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json", - "jsonpath": "$.clientLibrary.version" + "type": "json" } - ] + ], + "release-type": "python" }, - "packages/google-cloud-contentwarehouse": { - "component": "google-cloud-contentwarehouse", - "release-type": "python", + "packages/google-cloud-iap": { + "component": "google-cloud-iap", "extra-files": 
[ - "google/cloud/contentwarehouse/gapic_version.py", + "google/cloud/iap/gapic_version.py", + "google/cloud/iap_v1/gapic_version.py" + ], + "release-type": "python" + }, + "packages/google-cloud-ids": { + "component": "google-cloud-ids", + "extra-files": [ + "google/cloud/ids/gapic_version.py", + "google/cloud/ids_v1/gapic_version.py" + ], + "release-type": "python" + }, + "packages/google-cloud-rapidmigrationassessment": { + "component": "google-cloud-rapidmigrationassessment", + "extra-files": [ + "google/cloud/rapidmigrationassessment/gapic_version.py", + "google/cloud/rapidmigrationassessment_v1/gapic_version.py", { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json", - "jsonpath": "$.clientLibrary.version" + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.rapidmigrationassessment.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-storageinsights": { + "component": "google-cloud-storageinsights", + "extra-files": [ + "google/cloud/storageinsights/gapic_version.py", + "google/cloud/storageinsights_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.storageinsights.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-support": { + "component": "google-cloud-support", + "extra-files": [ + "google/cloud/support/gapic_version.py", + "google/cloud/support_v2/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.support.v2.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-vmwareengine": { + "component": "google-cloud-vmwareengine", + "extra-files": [ + "google/cloud/vmwareengine/gapic_version.py", + "google/cloud/vmwareengine_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json", + "type": "json" + } + ], + "release-type": "python" + }, + "packages/google-cloud-workstations": { + "component": "google-cloud-workstations", + "extra-files": [ + "google/cloud/workstations/gapic_version.py", + "google/cloud/workstations_v1/gapic_version.py", + "google/cloud/workstations_v1beta/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.workstations.v1.json", + "type": "json" + }, + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.workstations.v1beta.json", + "type": "json" } - ] + ], + "release-type": "python" }, "packages/google-geo-type": { "component": "google-geo-type", - "release-type": "python", "extra-files": [ "google/geo/type/gapic_version.py" - ] + ], + "release-type": "python" }, "packages/google-maps-addressvalidation": { "component": "google-maps-addressvalidation", - "release-type": "python", "extra-files": [ "google/maps/addressvalidation/gapic_version.py", + "google/maps/addressvalidation_v1/gapic_version.py", { - "type": "json", + "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.maps.addressvalidation.v1.json", - "jsonpath": "$.clientLibrary.version" + "type": "json" } - ] + ], + "release-type": "python" }, - "packages/google-maps-routing": { - "component": "google-maps-routing", - "release-type": "python", + 
"packages/google-maps-mapsplatformdatasets": { + "component": "google-maps-mapsplatformdatasets", "extra-files": [ - "google/maps/routing/gapic_version.py", + "google/maps/mapsplatformdatasets/gapic_version.py", + "google/maps/mapsplatformdatasets_v1/gapic_version.py", + "google/maps/mapsplatformdatasets_v1alpha/gapic_version.py", { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.maps.routing.v2.json", - "jsonpath": "$.clientLibrary.version" + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1alpha.json", + "type": "json" + }, + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.maps.mapsplatformdatasets.v1.json", + "type": "json" } - ] + ], + "release-type": "python" }, - "packages/google-apps-script-type": { - "component": "google-apps-script-type", - "release-type": "python", + "packages/google-maps-places": { + "component": "google-maps-places", "extra-files": [ - "google/apps/script/type/gapic_version.py" - ] - }, - "packages/google-cloud-vmwareengine": { - "component": "google-cloud-vmwareengine", - "release-type": "python", - "extra-files": [ - "google/cloud/vmwareengine/gapic_version.py", + "google/maps/places/gapic_version.py", + "google/maps/places_v1/gapic_version.py", { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json", - "jsonpath": "$.clientLibrary.version" + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.maps.places.v1.json", + "type": "json" } - ] + ], + "release-type": "python" }, - "packages/google-cloud-datacatalog-lineage": { - "component": "google-cloud-datacatalog-lineage", - "release-type": "python", + "packages/google-maps-routing": { + "component": "google-maps-routing", "extra-files": [ - "google/cloud/datacatalog/lineage/gapic_version.py", + "google/maps/routing/gapic_version.py", + "google/maps/routing_v2/gapic_version.py", { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json", - "jsonpath": "$.clientLibrary.version" + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.maps.routing.v2.json", + "type": "json" } - ] + ], + "release-type": "python" } } -} \ No newline at end of file +} diff --git a/scripts/requirements.txt b/scripts/requirements.txt index ba07d192a2d5..2c24336eb316 100644 --- a/scripts/requirements.txt +++ b/scripts/requirements.txt @@ -1 +1 @@ -requests==2.28.2 +requests==2.31.0 diff --git a/scripts/split_repo_migration/README.md b/scripts/split_repo_migration/README.md new file mode 100644 index 000000000000..2ccca1d40917 --- /dev/null +++ b/scripts/split_repo_migration/README.md @@ -0,0 +1,36 @@ +The directory contains scripts which can be used to migrate client library code +from split repositories located at http://github.com/googleapis/python-* into +this mono repository. Each such split repo becomes a "package" (under +`packages/`) in this repo. + +These two scripts should be called one after the other for each split repo being +migrated, but this process can be done for multiple repos at once. In other +words, the order in which the PRs are created or merged does not matter: + - [single-library.git-migrate-history.sh](single-library.git-migrate-history.sh) + imports the files in the split repo into a package in this repo. 
+
+The output of the following script should be merged before the script is run
+again, as it modifies files shared among all packages:
+ - [multiple-library.post-process.sh](multiple-library.post-process.sh), for a
+   given list of migrated packages, applies package-specific migration changes
+   that touch files shared among all packages in this repo. It does this by
+   first calling the script below for each specified package, and then invoking
+   OwlBot locally to post-process all the specified packages at once. It
+   locally commits the changes to this repo but does not open a PR.
+ - [single-library.post-process.common-files.sh](single-library.post-process.common-files.sh)
+   applies package-specific migration changes that touch files shared among
+   multiple packages. It locally commits the changes to this repo but does
+   not open a PR.
+
+The following script cleans up the now-migrated split repo:
+- [delete-everything-split-repo.sh](delete-everything-split-repo.sh) will remove
+  almost all files from a split repository and update the README.rst file to
+  indicate that the repository is archived.
+
diff --git a/scripts/split_repo_migration/delete-everything-split-repo.sh b/scripts/split_repo_migration/delete-everything-split-repo.sh
new file mode 100755
index 000000000000..bac2a672080d
--- /dev/null
+++ b/scripts/split_repo_migration/delete-everything-split-repo.sh
@@ -0,0 +1,137 @@
+#!/bin/bash
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was written as part of a project to migrate
+# split repositories to the mono repository google-cloud-python.
+# This script will delete all files in a split repository and
+# update the README to indicate that the repository is archived.
+
+# sourced vs execution detection obtained from https://stackoverflow.com/a/28776166
+SOURCED=0
+if [ -n "$ZSH_VERSION" ]; then
+    case $ZSH_EVAL_CONTEXT in *:file) SOURCED=1;; esac
+elif [ -n "$KSH_VERSION" ]; then
+    [ "$(cd -- "$(dirname -- "$0")" && pwd -P)/$(basename -- "$0")" != "$(cd -- "$(dirname -- "${.sh.file}")" && pwd -P)/$(basename -- "${.sh.file}")" ] && SOURCED=1
+elif [ -n "$BASH_VERSION" ]; then
+    (return 0 2>/dev/null) && SOURCED=1
+else # All other shells: examine $0 for known shell binary filenames.
+    # Detects `sh` and `dash`; add additional shell filenames as needed.
+    case ${0##*/} in sh|-sh|dash|-dash) SOURCED=1;; esac
+fi
+
+(( ${SOURCED} != 1 )) || { \
+    echo "Please do not source this script, but execute it directly."
+    return -10
+}
+
+# We require executing the script so that an early exit (explicitly or via -e)
+# does not kill the user's shell.
+
+# `-e` enables the script to automatically fail when a command fails
+set -e
+
+# Ensure that both arguments are provided
+if [ $# -lt 2 ]
+then
+    echo "Usage: $0 <split-repo-name> <artifact-name>
+
+Args:
+  split-repo-name: The name of the split repository. For example, \`python-access-approval\`.
+  artifact-name: The name of the artifact on PyPI. For example, \`google-cloud-access-approval\`.
+"
+    exit 1
+fi
+
+# The name of the split repository. (e.g. python-access-approval)
+SPLIT_REPO=$1
+# The name of the artifact on PyPI. (e.g. google-cloud-access-approval)
+ARTIFACT_NAME=$2
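+
+# Example invocation, using the names from the comments above:
+#   ./delete-everything-split-repo.sh python-access-approval google-cloud-access-approval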
+
+SPLIT_REPO_DIR="${SPLIT_REPO_DIR:-/tmp/delete-library.${SPLIT_REPO}.$(date +"%Y-%m-%d.%H:%M:%S")}"
+
+git clone "git@github.com:googleapis/${SPLIT_REPO}.git" $SPLIT_REPO_DIR
+
+cd $SPLIT_REPO_DIR
+
+# Create a git branch
+BRANCH_NAME="migrate-library.$(date +'%Y-%m-%d.%H-%M-%S')"
+git checkout -b ${BRANCH_NAME}
+
+# Quick check to make sure there are no handwritten samples.
+# Handwritten samples should be migrated to python-docs-samples before repository code is deleted.
+# Exclude the samples/generated_samples/* directory, which contains autogenerated samples.
+NUM_PY_SAMPLES_FILES=$(find samples -type f -name "*.py" ! -path "samples/generated_samples/*" | grep -v "nox.*\.py" | grep -v "__init__\.py" | wc -l)
+
+# Fail if there are handwritten samples.
+if [[ $NUM_PY_SAMPLES_FILES -ne "0" ]]; then
+    echo "Please migrate handwritten samples to python-docs-samples before deleting the source."
+    exit 1
+fi
+
+# Fail if this repository has a customized version of the autogenerated code.
+# This specific check ensures that the only OwlBot exclusion is the standard
+# gapic_version.py one, i.e. that no other autogenerated files are excluded.
+STANDARD_EXCLUDES=$(grep owlbot.py -e 'excludes=\["\*\*/gapic_version.py"]' | wc -l)
+
+if [[ $STANDARD_EXCLUDES -ne "1" ]]; then
+    echo "A non-standard value for \`excludes\` is not supported in the repository"
+    exit 1
+fi
+
+# Fail if this repository has a customized version of the autogenerated code.
+# This specific check makes sure that the owlbot.py file contains neither `s.replace`
+# nor `sed`, which have historically been used to modify the generated code.
+NUM_STRING_REPLACEMENTS=$(grep owlbot.py -e 's.replace' | wc -l)
+if [[ $NUM_STRING_REPLACEMENTS -ne "0" ]]; then
+    echo "s.replace() is not supported in the python mono repository"
+    exit 1
+fi
+
+NUM_STRING_REPLACEMENTS=$(grep owlbot.py -we 'sed' | wc -l)
+if [[ $NUM_STRING_REPLACEMENTS -ne "0" ]]; then
+    echo "sed is not supported in the python mono repository"
+    exit 1
+fi
+
+# Delete everything except LICENSE, SECURITY.md, README.rst, .git and .repo-metadata.json
+ls -A | grep -v 'LICENSE\|SECURITY.md\|README.rst\|.git\|.repo-metadata.json' | xargs rm -rf
+
+# Additional cleanup for files/folders with a .git prefix
+rm -rf .github
+rm -rf .gitignore
+
+# Prepend a notice to the README stating that this repository has been archived
+# and the source has moved.
+README_RST="${SPLIT_REPO_DIR}/README.rst"
+
+if [[ ! -f $README_RST ]]; then
+    echo "README.rst file not found"
+    exit 1
+fi
+
+{ cat - $README_RST | sponge $README_RST ; } << EOF
+:**NOTE**: **This github repository is archived. The repository contents and history have moved to** \`google-cloud-python\`_.
+
+.. _google-cloud-python: https://github.com/googleapis/google-cloud-python/tree/main/packages/${ARTIFACT_NAME}
+
+
+EOF
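+
+# (Above, `cat - "$README_RST"` reads the archive notice from the heredoc on
+# stdin followed by the current README contents, and sponge(1) from moreutils
+# buffers the combined stream before writing it back over the same file.)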
+
+# Stage all files
+git add .
+
+# Commit all files
+git commit -m 'build: update README to indicate that source has moved and delete all files'
+
+# Push the branch. Note: the force-push option is used to allow this script to be called multiple times.
+git push origin -f ${BRANCH_NAME}
diff --git a/scripts/split_repo_migration/multiple-library.post-process.sh b/scripts/split_repo_migration/multiple-library.post-process.sh
new file mode 100755
index 000000000000..6cd99b18cdfb
--- /dev/null
+++ b/scripts/split_repo_migration/multiple-library.post-process.sh
@@ -0,0 +1,125 @@
+#!/bin/bash
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was written as part of a project to migrate
+# split repositories to the mono-repository google-cloud-python.
+#
+# This script takes a list of packages. It will first call
+# single-library.post-process.common-files.sh on each package to update the
+# various metadata files migrated from the split repo. It will then cause the
+# OwlBot post-processor to run once (via Docker) for all the repos together to
+# update the appropriate files.
+#
+# This script deals with common (shared) files, and thus CANNOT be called in
+# parallel for individual APIs, since each run touches shared files and would
+# thus lead to merge conflicts.
+#
+# Pre-condition: each split repo has been copied to its package directory in
+# the monorepo.
+#
+# INVOCATION
+#   multiple-library.post-process.sh DIR PACKAGE [PACKAGE...]
+# where DIR is the path to the monorepo (relative dirs, like ".", are fine) and
+# PACKAGE is the name of a (newly migrated) package in the monorepo.
+# EXAMPLE from the root directory of the monorepo:
+#   ./scripts/split_repo_migration/multiple-library.post-process.sh . google-cloud-speech
+
+# sourced vs execution detection obtained from https://stackoverflow.com/a/28776166
+SOURCED=0
+if [ -n "$ZSH_VERSION" ]; then
+    case $ZSH_EVAL_CONTEXT in *:file) SOURCED=1;; esac
+elif [ -n "$KSH_VERSION" ]; then
+    [ "$(cd -- "$(dirname -- "$0")" && pwd -P)/$(basename -- "$0")" != "$(cd -- "$(dirname -- "${.sh.file}")" && pwd -P)/$(basename -- "${.sh.file}")" ] && SOURCED=1
+elif [ -n "$BASH_VERSION" ]; then
+    (return 0 2>/dev/null) && SOURCED=1
+else # All other shells: examine $0 for known shell binary filenames.
+    # Detects `sh` and `dash`; add additional shell filenames as needed.
+    case ${0##*/} in sh|-sh|dash|-dash) SOURCED=1;; esac
+fi
+
+(( ${SOURCED} != 1 )) || { \
+    echo "Please do not source this script, but execute it directly."
+    return -10
+}
+
+# to allow echoing commands for debugging, precede the following with something
+# like "echo"
+GIT="git"
+
+# We require executing the script so that an early exit (explicitly or via -e)
+# does not kill the user's shell.
+
+# `-e` enables the script to automatically fail when a command fails
+set -e
+
+MESSAGE=""
+
+if [ $# -lt 2 ]
+then
+    echo "Usage: $0 REPO_ROOT PACKAGE..."
+    exit 1
+fi
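+
+# For example, to post-process two newly migrated packages in one run
+# (package names are illustrative):
+#   ./scripts/split_repo_migration/multiple-library.post-process.sh . \
+#       google-cloud-speech google-cloud-vision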
+
+PATH_MONOREPO="$(realpath "$1")"
+shift
+
+cat <<EOF
+Post-processing migrated packages in the monorepo:
+    PATH_MONOREPO: ${PATH_MONOREPO}
+    PACKAGES:      $@
+EOF
+
+pushd "${PATH_MONOREPO}" >& /dev/null
+
+BRANCH="migration-batch-${USER}-$$"
+${GIT} checkout -b "${BRANCH}"
+
+# variable naming convention
+#   PATH_* are absolute system paths
+#   MONOREPO_PATH_* are paths relative to the root of the monorepo
+#   MONOREPO_* are values used in the monorepo
+
+for MONOREPO_PACKAGE_NAME in "$@"
+do
+    PATH_PACKAGE="$(realpath "packages/${MONOREPO_PACKAGE_NAME}")"
+    MONOREPO_PATH_PACKAGE="packages/${MONOREPO_PACKAGE_NAME}"
+    cat <<EOF
+Post-processing package ${MONOREPO_PACKAGE_NAME}:
+    PATH_PACKAGE:          ${PATH_PACKAGE}
+    MONOREPO_PATH_PACKAGE: ${MONOREPO_PATH_PACKAGE}
+EOF
+    # Update the metadata files migrated from this package's split repo.
+    ./scripts/split_repo_migration/single-library.post-process.common-files.sh "${MONOREPO_PATH_PACKAGE}"
+done
+
+# Run the OwlBot post-processor locally (via Docker) once over all the
+# specified packages, then record the result in a single local commit.
+${GIT} add .
+${GIT} commit -am "build: migrate code from split repositories (batch post-processing)" || \
+    echo "No changes to commit"
+
+popd >& /dev/null # "${PATH_MONOREPO}"
+
+[[ -z ${MESSAGE} ]] && echo "Done." || echo -e "Done, with a message:\n\n${MESSAGE}\n"
diff --git a/scripts/split_repo_migration/single-library.git-migrate-history.sh b/scripts/split_repo_migration/single-library.git-migrate-history.sh
new file mode 100755
index 000000000000..88e5a67232a4
--- /dev/null
+++ b/scripts/split_repo_migration/single-library.git-migrate-history.sh
@@ -0,0 +1,179 @@
+#!/bin/bash
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Sample usage:
+#
+#   git-migrate-history.sh \
+#     "googleapis/python-speech" \
+#     "~/temp/google-cloud-python" \
+#     "" \
+#     "" \
+#     ".github/.OwlBot.yaml"
+#
+# This invocation runs the migration script on python-speech, where the source
+# path is the root directory, nothing is ignored, and .OwlBot.yaml is kept.
+
+set -ex
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && EXIT=return || EXIT=exit
+
+if [ $# -lt 3 ]
+then
+    echo "Usage: $0 <source-repo> <target-repo-path> <source-path> [folders,to,skip] [files,to,keep] [branch-name] [issue-number]"
+    $EXIT 1
+fi
+
+# source GitHub repository. format: <owner>/<repo>
+SOURCE_REPO=$1
+
+# destination repository: local path to a google-cloud-python checkout
+TARGET_REPO=$2
+
+# path in the source repo to copy code from. Defaults to the root directory
+SOURCE_PATH=$3
+
+# comma-separated list of files/folders to skip
+IGNORE_FOLDERS=$4
+
+# keep these specific files that would otherwise be deleted by IGNORE_FOLDERS
+KEEP_FILES=$5
+
+# override the HEAD branch name for the migration PR
+BRANCH=$6
+
+# GitHub issue number
+ISSUE_NUMBER=$7
+
+if [[ -z "${BRANCH}" ]]
+then
+    # default the branch name to be generated from the source repo name
+    BRANCH=$(basename ${SOURCE_REPO})-migration
+fi
+export FILTER_BRANCH_SQUELCH_WARNING=1
+
+# create a working directory
+WORKDIR="$(mktemp -d -t code-migration-XXXXXXXXXX)"
+echo "Created working directory: ${WORKDIR}"
+
+pushd "${WORKDIR}" # cd into workdir
+
+echo "Cloning source repository: ${SOURCE_REPO}"
+git clone "git@github.com:${SOURCE_REPO}.git" source-repo
+
+pushd source-repo
+
+DISTRIBUTION_NAME=$(jq -r '.distribution_name' .repo-metadata.json) # -r removes quotes around the name.
+TARGET_PATH="packages/${DISTRIBUTION_NAME}"
+
+git remote remove origin
+
+# prune only files within the specified directory
+if [[ ! -z "${SOURCE_PATH}" ]]
+then
+    echo "Pruning commits, keeping only the path: ${SOURCE_PATH}"
+    # git filter-branch rewrites history; that is intentional here, and safe,
+    # because it runs in this throwaway clone with no origin remote.
+    git filter-branch \
+        --prune-empty \
+        --subdirectory-filter "${SOURCE_PATH}"
+fi
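+
+# (Each git filter-branch pass in this script rewrites every commit in the
+# clone, so the history later merged into the monorepo contains only the
+# migrated files, relocated to their final packages/ locations.)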
-z "${IGNORE_FOLDERS}" ]] +then + echo "Ignoring folder: ${IGNORE_FOLDERS}" + mkdir -p "${WORKDIR}/filtered-source" + FOLDERS=$(echo ${IGNORE_FOLDERS} | tr "," " ") + # remove files/folders we don't want + FILTER="(rm -rf ${FOLDERS} || true)" + if [[ ! -z "${KEEP_FILES}" ]] + then + KEEP_FILES_SPACES=($(echo ${KEEP_FILES} | tr "," " ")) + LAST_ELEMENT=$(( ${#KEEP_FILES_SPACES[@]} - 1 )) + KEEP_FILE_COMMANDS="" + for file in "${KEEP_FILES_SPACES[@]}" + do + if [[ $file == "${KEEP_FILES_SPACES[$LAST_ELEMENT]}" ]] + then + KEEP_FILE_COMMANDS+="git checkout -- ${file} 2>/dev/null || true" + else + KEEP_FILE_COMMANDS+="git checkout -- ${file} 2>/dev/null || true; " + fi + done + # restore files to keep, silence errors if the file doesn't exist + FILTER="${FILTER}; ${KEEP_FILE_COMMANDS}" + fi + git filter-branch \ + --force \ + --prune-empty \ + --tree-filter "${FILTER}" +fi + +# reorganize the filtered code into the desired target locations +echo "Moving files to destination path: ${TARGET_PATH}" +git filter-branch \ + --force \ + --prune-empty \ + --tree-filter \ + "shopt -s dotglob; mkdir -p ${WORKDIR}/migrated-source; mv * ${WORKDIR}/migrated-source; mkdir -p ${TARGET_PATH}; mv ${WORKDIR}/migrated-source/* ${TARGET_PATH}" + +# back to workdir +popd + +# merge histories +pushd $TARGET_REPO + +REMOTE="remote.${SOURCE_REPO}" +git remote add --fetch ${REMOTE} ${WORKDIR}/source-repo +git checkout -B "${BRANCH}" +git merge --allow-unrelated-histories ${REMOTE}/main --no-edit + +echo "Success" + +popd # back to workdir + +# Do a diff between source code split repo and migrated code. +git clone "git@github.com:${SOURCE_REPO}.git" source-repo-validation # Not ideal to clone again. +rm -rf source-repo-validation/.git # That folder is not needed for validation. + +DIFF_FILE="${WORKDIR}/diff.txt" +if diff -r "${TARGET_REPO}/${TARGET_PATH}" source-repo-validation > "${DIFF_FILE}" ; then + echo "No diff" +else + echo "Diff non-empty. See ${DIFF_FILE}" + $EXIT 1 +fi + +pushd "${TARGET_REPO}" >& /dev/null # To target repo + +# For postprocessing of the batch migration script. +mkdir -p owl-bot-staging/${DISTRIBUTION_NAME}/${DISTRIBUTION_NAME} +touch owl-bot-staging/${DISTRIBUTION_NAME}/${DISTRIBUTION_NAME}/${DISTRIBUTION_NAME}.txt +git add owl-bot-staging +git commit -m "Trigger owlbot post-processor" + +git push -u origin "${BRANCH}" --force + +# create pull request +if which gh > /dev/null +then + gh pr create --title "chore(migration): Migrate code from ${SOURCE_REPO} into ${TARGET_PATH}" --body "$(echo "See #${ISSUE_NUMBER}. \n\nThis PR should be merged with a merge-commit, not a squash-commit, in order to preserve the git history.")" +else + hub pull-request -m "migrate code from ${SOURCE_REPO}" +fi + +popd >& /dev/null + +# Some of the post-processing scripts require the ${DISTRIBUTION_NAME}. We +# output it here so they can use the same value we derived, rather than risking +# diverging computations of the value. +echo "${DISTRIBUTION_NAME}" diff --git a/scripts/split_repo_migration/single-library.post-process.api-files.sh b/scripts/split_repo_migration/single-library.post-process.api-files.sh new file mode 100755 index 000000000000..25426d6116c0 --- /dev/null +++ b/scripts/split_repo_migration/single-library.post-process.api-files.sh @@ -0,0 +1,214 @@ +#!/bin/bash +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
diff --git a/scripts/split_repo_migration/single-library.post-process.api-files.sh b/scripts/split_repo_migration/single-library.post-process.api-files.sh
new file mode 100755
index 000000000000..25426d6116c0
--- /dev/null
+++ b/scripts/split_repo_migration/single-library.post-process.api-files.sh
@@ -0,0 +1,214 @@
+#!/bin/bash
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was written as part of a project to migrate
+# split repositories to the mono-repository google-cloud-python.
+#
+# This script will update the various metadata files migrated from the split
+# repo into formats and locations suitable for the mono-repo. This script deals
+# with API-specific files, and can be invoked on many just-migrated APIs in any
+# order. There is a companion script, *.common-files.sh, which must be invoked
+# for each API to finish the migration of the common (shared) files; that one
+# cannot be parallelized, as it touches shared files.
+#
+# Pre-condition: the split repo has been copied to the path indicated when
+# calling this script.
+#
+# INVOCATION from the mono-repo root directory:
+#   single-library.post-process.api-files.sh PACKAGE_PATH
+# where PACKAGE_PATH is the path (absolute or relative) from pwd to the
+# directory in google-cloud-python holding the copied split repo. Typically, if
+# running from the top level of google-cloud-python, this is something like
+# "packages/google-cloud-APINAME"
+# EXAMPLE from the root directory of the monorepo:
+#   ./scripts/split_repo_migration/single-library.post-process.api-files.sh packages/google-cloud-speech
+#
+# For debugging/developing this script, you can pass additional parameters:
+#   ./single-library.post-process.api-files.sh SPLIT_REPO_DIR MONOREPO_DIR MONOREPO_PACKAGE_NAME
+# Example from this script's directory:
+#   ./single-library.post-process.api-files.sh ../../../python-speech ../../ google-cloud-speech
+
+# sourced vs execution detection obtained from https://stackoverflow.com/a/28776166
+SOURCED=0
+if [ -n "$ZSH_VERSION" ]; then
+    case $ZSH_EVAL_CONTEXT in *:file) SOURCED=1;; esac
+elif [ -n "$KSH_VERSION" ]; then
+    [ "$(cd -- "$(dirname -- "$0")" && pwd -P)/$(basename -- "$0")" != "$(cd -- "$(dirname -- "${.sh.file}")" && pwd -P)/$(basename -- "${.sh.file}")" ] && SOURCED=1
+elif [ -n "$BASH_VERSION" ]; then
+    (return 0 2>/dev/null) && SOURCED=1
+else # All other shells: examine $0 for known shell binary filenames.
+    # Detects `sh` and `dash`; add additional shell filenames as needed.
+    case ${0##*/} in sh|-sh|dash|-dash) SOURCED=1;; esac
+fi
+
+(( ${SOURCED} != 1 )) || { \
+    echo "Please do not source this script, but execute it directly."
+    return -10
+}
+
+# We require executing the script so that an early exit (explicitly or via -e)
+# does not kill the user's shell.
+
+# `-e` enables the script to automatically fail when a command fails
+set -e
+
+NOP="echo -n"
+DEBUG="" # "yes" # set to blank for a real run, or non-blank to prevent modifying the split-repo
+MESSAGE=""
+
+[[ -n ${DEBUG} ]] && {
+    # Comment out some commands for development/debugging
+    RM='echo "*** would run:" rm'
+    GIT='echo "*** would run:" git'
+
+    # It's easier to debug if keys don't get reordered
+    SORT_JSON_KEYS=""
+} || {
+    # Real commands for actual runs
+    RM='rm'
+    GIT='git'
+
+    # Sort keys for easier look-up.
+    SORT_JSON_KEYS="--sort-keys"
+}
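+
+# (With DEBUG set to a non-blank value above, $RM and $GIT expand to echo
+# stubs, so deletions and commits are printed instead of executed, giving a
+# cheap dry-run mode.)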
+
+if [ $# -lt 1 ]
+then
+    echo "Usage: $0 PATH/TO/PACKAGE_NAME"
+    exit 1
+fi
+
+# variable naming convention
+#   PATH_* are absolute system paths
+#   MONOREPO_PATH_* are paths relative to the root of the monorepo
+#   MONOREPO_* are values used in the monorepo
+
+# In a real run for a split repo already copied to the mono-repo, $1 would be
+# specified as `packages/PACKAGE_NAME`. For developing this script, it's useful
+# to specify $1 as the name of a completely separate split repo, in which case
+# specifying $2 and $3 is also useful (see below).
+PATH_PACKAGE="$(realpath "$1")"
+
+# The optional second argument is useful for development: it specifies a different
+# directory for the monorepo (rather than deriving it from $1).
+PATH_MONOREPO="${2:-$(dirname $(dirname $PATH_PACKAGE))}"
+PATH_MONOREPO="$(realpath "${PATH_MONOREPO}")"
+
+# The optional third argument is useful for development: it specifies what the
+# package name should be for this API once migrated to the monorepo. This can be
+# inferred if we've already migrated the split repo to the correct location, but
+# otherwise needs to be specified, since it differs from the split repo name
+# (for example, the repo "python-speech" defines a package
+# "google-cloud-speech").
+MONOREPO_PACKAGE_NAME="${3:-$(basename ${PATH_PACKAGE})}"
+
+MONOREPO_PATH_PACKAGE="packages/${MONOREPO_PACKAGE_NAME}"
+
+cat <<EOF
+Post-processing API-specific files for ${MONOREPO_PACKAGE_NAME}:
+    PATH_PACKAGE:          ${PATH_PACKAGE}
+    PATH_MONOREPO:         ${PATH_MONOREPO}
+    MONOREPO_PATH_PACKAGE: ${MONOREPO_PATH_PACKAGE}
+EOF
+
+pushd "${PATH_MONOREPO}" >& /dev/null
+
+## START system tests check ########################################
+# variable prefix: TST_*
+
+# If there are integration tests, do not proceed with the script.
+
+TST_MONO_TESTDIR="${MONOREPO_PATH_PACKAGE}/tests/"
+TST_MONO_SYSTEM_DIR="${TST_MONO_TESTDIR}system"
+TST_MONO_SYSTEM_FILE="${TST_MONO_TESTDIR}system.py"
+echo "Checking for system tests in ${TST_MONO_TESTDIR}"
+
+[[ ! -f ${TST_MONO_SYSTEM_FILE} ]] || \
+    { echo "ERROR: ${TST_MONO_SYSTEM_FILE} exists. Need to manually deal with that." ; exit 10 ; }
+[[ ! -d ${TST_MONO_SYSTEM_DIR} ]] || \
+    { echo "ERROR: ${TST_MONO_SYSTEM_DIR} exists. Need to manually deal with that." ; exit 11 ; }
+## END system tests check
+
+## START owlbot.yaml migration ########################################
+# variable prefix: OWY_*
+# FIXME: KEEP?
+OWY_MONO_PATH="${MONOREPO_PATH_PACKAGE}/.OwlBot.yaml"
+echo "Migrating: ${OWY_MONO_PATH}"
+
+OWY_SPLIT_PATH="${PATH_PACKAGE}/.github/.OwlBot.yaml"
+[[ ! -f ${OWY_SPLIT_PATH} ]] || {
+    mkdir -p $(dirname ${OWY_MONO_PATH})
+    cp ${OWY_SPLIT_PATH} ${OWY_MONO_PATH}
+
+    # remove `docker:` line
+    sed -i "/docker:/d" "${OWY_MONO_PATH}"
+    # remove `image:` line
+    sed -i "/image:/d" "${OWY_MONO_PATH}"
+
+    # The order of lines A and B below matters, since we DO want lines affected by
+    # A to be further affected by B.
+    sed -i 's|\.\*-py/(.*)|.*-py|' "${OWY_MONO_PATH}"
+    sed -i "s|dest: /owl-bot-staging/\$1/\$2|dest: /owl-bot-staging/\$1|" "${OWY_MONO_PATH}" # A
+    sed -i "s| /owl-bot-staging/| /owl-bot-staging/${MONOREPO_PACKAGE_NAME}/|g" "${OWY_MONO_PATH}" # B
+
+    # TODO: Review the following. For consistency with the NodeJS migration script:
+    # - we are not removing `begin-after-commit-hash`
+    # - we are not removing `deep-remove-regex`, even though it refers to a non-API-specific directory.
+
+    $RM ${OWY_SPLIT_PATH}
+}
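+
+# For illustration (the API and package names are hypothetical), a split-repo
+# .OwlBot.yaml entry such as:
+#   deep-copy-regex:
+#     - source: /google/cloud/speech/(v.*)/.*-py/(.*)
+#       dest: /owl-bot-staging/$1/$2
+# ends up, after the edits above, as:
+#   deep-copy-regex:
+#     - source: /google/cloud/speech/(v.*)/.*-py
+#       dest: /owl-bot-staging/google-cloud-speech/$1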
+## END owlbot.yaml migration
+
+## START owlbot.py deletion ########################################
+# variable prefix: OWP_*
+OWP_MONO_PATH="${MONOREPO_PATH_PACKAGE}/owlbot.py"
+echo "Migrating: ${OWP_MONO_PATH}"
+
+[[ -z ${DEBUG} ]] || { \
+    OWP_SPLIT_PATH="${PATH_PACKAGE}/owlbot.py"
+    [[ ! -f "${OWP_SPLIT_PATH}" ]] || cp -u ${OWP_SPLIT_PATH} ${OWP_MONO_PATH}
+}
+
+[[ ! -f "${OWP_MONO_PATH}" ]] || {
+    MESSAGE="${MESSAGE}\n\nWARNING: Deleted ${OWP_MONO_PATH}"
+    ${RM} -rf "${OWP_MONO_PATH}"
+}
+## END owlbot.py deletion
+
+## START delete docs/changelog.md .github/ .kokoro/ ####################
+${RM} -f "${PATH_PACKAGE}/docs/changelog.md"
+${RM} -rf "${PATH_PACKAGE}/.github"
+${RM} -rf "${PATH_PACKAGE}/.kokoro"
+${RM} -f "${PATH_PACKAGE}/.trampolinerc"
+${RM} -f "${PATH_PACKAGE}/${MONOREPO_PACKAGE_NAME}.txt"
+## END delete docs/changelog.md .github/ .kokoro/
+
+## START commit changes #############################################
+echo "Committing changes locally"
+${GIT} add .
+${GIT} commit -am "build: ${MONOREPO_PACKAGE_NAME} migration: adjust owlbot-related files" || \
+    echo "No changes to commit"
+## END commit changes
+
+popd >& /dev/null # "${PATH_MONOREPO}"
+
+[[ -z ${MESSAGE} ]] && echo "Done." || echo -e "Done, with a message:\n\n${MESSAGE}\n"
diff --git a/scripts/split_repo_migration/single-library.post-process.common-files.sh b/scripts/split_repo_migration/single-library.post-process.common-files.sh
new file mode 100755
index 000000000000..cd25c91c6a10
--- /dev/null
+++ b/scripts/split_repo_migration/single-library.post-process.common-files.sh
@@ -0,0 +1,248 @@
+#!/bin/bash
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was written as part of a project to migrate
+# split repositories to the mono-repository google-cloud-python.
+#
+# This script will update the various metadata files migrated from the split
+# repo into formats and locations suitable for the mono-repo. This script deals
+# with common (shared) files, and thus CANNOT be called in parallel for
+# individual APIs, since each run touches shared files and would thus lead to
+# merge conflicts.
+#
+# Pre-condition: the split repo has been copied to the path indicated when
+# calling this script.
+#
+# **** NOTE: You would typically not call this directly unless debugging. Use
+# multiple-library.post-process.sh instead. ****
+#
+# INVOCATION from the mono-repo root directory:
+#   single-library.post-process.common-files.sh PACKAGE_PATH
+# where PACKAGE_PATH is the path (absolute or relative) from pwd to the
+# directory in google-cloud-python holding the copied split repo.
+# Typically, if running from the top level of google-cloud-python, this is
+# something like "packages/google-cloud-APINAME"
+# EXAMPLE from the root directory of the monorepo:
+#   ./scripts/split_repo_migration/single-library.post-process.common-files.sh packages/google-cloud-speech
+#
+# For debugging/developing this script, you can pass additional parameters:
+#   ./single-library.post-process.common-files.sh SPLIT_REPO_DIR MONOREPO_DIR MONOREPO_PACKAGE_NAME
+# Example from this script's directory:
+#   ./single-library.post-process.common-files.sh ../../../python-speech ../../ google-cloud-speech
+
+# sourced vs execution detection obtained from https://stackoverflow.com/a/28776166
+SOURCED=0
+if [ -n "$ZSH_VERSION" ]; then
+    case $ZSH_EVAL_CONTEXT in *:file) SOURCED=1;; esac
+elif [ -n "$KSH_VERSION" ]; then
+    [ "$(cd -- "$(dirname -- "$0")" && pwd -P)/$(basename -- "$0")" != "$(cd -- "$(dirname -- "${.sh.file}")" && pwd -P)/$(basename -- "${.sh.file}")" ] && SOURCED=1
+elif [ -n "$BASH_VERSION" ]; then
+    (return 0 2>/dev/null) && SOURCED=1
+else # All other shells: examine $0 for known shell binary filenames.
+    # Detects `sh` and `dash`; add additional shell filenames as needed.
+    case ${0##*/} in sh|-sh|dash|-dash) SOURCED=1;; esac
+fi
+
+(( ${SOURCED} != 1 )) || { \
+    echo "Please do not source this script, but execute it directly."
+    return -10
+}
+
+# We require executing the script so that an early exit (explicitly or via -e)
+# does not kill the user's shell.
+
+# `-e` enables the script to automatically fail when a command fails
+set -e
+
+NOP="echo -n"
+DEBUG="" # "yes" # set to blank for a real run, or non-blank to prevent modifying the split-repo
+MESSAGE=""
+
+# Use an indent of 4 when formatting json, for readability
+INDENT_JSON="--indent 4"
+
+[[ -n ${DEBUG} ]] && {
+    # Comment out some commands for development/debugging
+    RM='echo "*** would run:" rm'
+    GIT='echo "*** would run:" git'
+
+    # It's easier to debug if keys don't get reordered
+    SORT_JSON_KEYS=""
+} || {
+    # Real commands for actual runs
+    RM='rm'
+    GIT='git'
+
+    # Sort keys for easier look-up.
+    SORT_JSON_KEYS="--sort-keys"
+}
+
+if [ $# -lt 1 ]
+then
+    echo "Usage: $0 PATH/TO/PACKAGE_NAME"
+    exit 1
+fi
+
+# variable naming convention
+#   PATH_* are absolute system paths
+#   MONOREPO_PATH_* are paths relative to the root of the monorepo
+#   MONOREPO_* are values used in the monorepo
+
+# In a real run for a split repo already copied to the mono-repo, $1 would be
+# specified as `packages/PACKAGE_NAME`. For developing this script, it's useful
+# to specify $1 as the name of a completely separate split repo, in which case
+# specifying $2 and $3 is also useful (see below).
+PATH_PACKAGE="$(realpath "$1")"
+
+# The optional second argument is useful for development: it specifies a different
+# directory for the monorepo (rather than deriving it from $1).
+PATH_MONOREPO="${2:-$(dirname $(dirname $PATH_PACKAGE))}"
+PATH_MONOREPO="$(realpath "${PATH_MONOREPO}")"
+
+# The optional third argument is useful for development: it specifies what the
+# package name should be for this API once migrated to the monorepo. This can be
+# inferred if we've already migrated the split repo to the correct location, but
+# otherwise needs to be specified, since it differs from the split repo name
+# (for example, the repo "python-speech" defines a package
+# "google-cloud-speech").
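+# (The ${2:-...} and ${3:-...} parameter expansions below supply the normal,
+# non-debugging defaults: the monorepo root is two levels up from
+# packages/PACKAGE_NAME, and the package name is the directory's basename.)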
+MONOREPO_PACKAGE_NAME="${3:-$(basename ${PATH_PACKAGE})}"
+
+MONOREPO_PATH_PACKAGE="packages/${MONOREPO_PACKAGE_NAME}"
+
+cat <<EOF
+Post-processing common files for ${MONOREPO_PACKAGE_NAME}:
+    PATH_PACKAGE:          ${PATH_PACKAGE}
+    PATH_MONOREPO:         ${PATH_MONOREPO}
+    MONOREPO_PATH_PACKAGE: ${MONOREPO_PATH_PACKAGE}
+EOF
+
+pushd "${PATH_MONOREPO}" >& /dev/null
+
+## START system tests check ########################################
+# variable prefix: TST_*
+
+# If there are integration tests, do not proceed with the script.
+
+TST_MONO_TESTDIR="${MONOREPO_PATH_PACKAGE}/tests/"
+TST_MONO_SYSTEM_DIR="${TST_MONO_TESTDIR}system"
+TST_MONO_SYSTEM_FILE="${TST_MONO_TESTDIR}system.py"
+echo "Checking for system tests in ${TST_MONO_TESTDIR}"
+
+[[ ! -f ${TST_MONO_SYSTEM_FILE} ]] || \
+    { echo "ERROR: ${TST_MONO_SYSTEM_FILE} exists. Need to manually deal with that." ; exit 10 ; }
+[[ ! -d ${TST_MONO_SYSTEM_DIR} ]] || \
+    { echo "ERROR: ${TST_MONO_SYSTEM_DIR} exists. Need to manually deal with that." ; exit 11 ; }
+## END system tests check
+
+## START release-please config migration ########################################
+# variable prefix: RPC_*
+
+RPC_MONO_PATH="release-please-config.json"
+echo "Migrating: ${RPC_MONO_PATH}"
+
+# Enable this if we want sorted keys. Keep it disabled to append new entries at
+# the end (useful for debugging):
+RPC_SORT_KEYS="${SORT_JSON_KEYS}"
+
+RPC_SPLIT_PATH="${PATH_PACKAGE}/release-please-config.json"
+jq ${INDENT_JSON} ".packages.\".\" += {component: \"${MONOREPO_PACKAGE_NAME}\"}" "${RPC_SPLIT_PATH}" | sponge "${RPC_SPLIT_PATH}"
+RPC_NEW_OBJECT="$(jq '.packages."."' "${RPC_SPLIT_PATH}")"
+
+jq ${INDENT_JSON} ${RPC_SORT_KEYS} --argjson newObject "${RPC_NEW_OBJECT}" ". * {\"packages\": {\"${MONOREPO_PATH_PACKAGE}\": \$newObject}}" ${RPC_MONO_PATH} | sponge ${RPC_MONO_PATH}
+$RM ${RPC_SPLIT_PATH}
+## END release-please config migration
+
+## START release-please manifest migration ########################################
+# variable prefix: RPM_*
+RPM_MONO_PATH=".release-please-manifest.json"
+echo "Migrating: ${RPM_MONO_PATH}"
+
+# Enable this if we want sorted keys. Keep it disabled to append new entries at
+# the end (useful for debugging):
+RPM_SORT_KEYS="${SORT_JSON_KEYS}"
+
+RPM_SPLIT_PATH="${PATH_PACKAGE}/.release-please-manifest.json"
+RPM_VERSION="$(jq '."."' "${RPM_SPLIT_PATH}")"
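+# (In the jq filters here, `. * {...}` deep-merges the new single-entry object
+# into the existing JSON document, adding this package's entry without
+# disturbing the entries for other packages.)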
* {\"${MONOREPO_PATH_PACKAGE}\": ${RPM_VERSION}}" ${RPM_MONO_PATH} | sponge ${RPM_MONO_PATH} +$RM ${RPM_SPLIT_PATH} +## END release-please manifest migration + +## START migrate release tags ######################################## +# We need to migrate the release tags since that is what release-please uses to +# get the next release number.We use ${RPM_VERSION} from the previous section +# variable prefix: LRT_* +echo "Replicating latest release tag" + +LRT_VERSION="${RPM_VERSION//\"/}" + +# any of the gapic_version.py files will do: they all match +LRT_VERSION_FILE="$(find ${MONOREPO_PATH_PACKAGE} -name "gapic_version.py" | head -n 1)" +LRT_SHA=$(git log --format=oneline ${LRT_VERSION_FILE} | grep release | head -n 1 | awk '{ print $1 }') +$GIT tag -f ${MONOREPO_PACKAGE_NAME}-v${LRT_VERSION} ${LRT_SHA} +$GIT push --tags +## END migrate release tags + + + + +## START .repo-metadata.json migration ######################################## +# variable prefix: RMJ_* +RMJ_MONO_PATH="${MONOREPO_PATH_PACKAGE}/.repo-metadata.json" +echo "Migrating: ${RMJ_MONO_PATH}" + +[[ -z ${DEBUG} ]] || { \ +RMJ_SPLIT_PATH="${PATH_PACKAGE}/.repo-metadata.json" +cp ${RMJ_SPLIT_PATH} ${RMJ_MONO_PATH} +} + +jq ${INDENT_JSON} '.repo = "googleapis/google-cloud-python"' ${RMJ_MONO_PATH} | sponge ${RMJ_MONO_PATH} +jq -r ".issue_tracker" "${RMJ_MONO_PATH}" | grep -q "github.com" && { + jq ${INDENT_JSON} '.issue_tracker = "https://github.com/googleapis/google-cloud-python/issues"' ${RMJ_MONO_PATH} | sponge ${RMJ_MONO_PATH} +} || { $NOP ; } +## END .repo-metadata.json migration + + +## START .pre-commit-config migration ######################################## +# variable prefix: PCC_* +PCC_MONO_PATH=".pre-commit-config.yaml" +echo "Migrating: ${PCC_MONO_PATH}" + +PCC_SPLIT_PATH="${PATH_PACKAGE}/.pre-commit-config.yaml" + +# We only copy this if it doesn't already exist in the mono-repo. +[[ ! -f "${PCC_MONO_PATH}" ]] && [[ -f "${PCC_SPLIT_PATH}" ]] && { + cp ${PCC_SPLIT_PATH} ${PCC_MONO_PATH} +} || { $NOP ; } +$RM -f ${PCC_SPLIT_PATH} +## END .pre-commit-config migration + +### Delete file that was copied over from owlbot-staging/ +${RM} -f "${PATH_PACKAGE}/${MONOREPO_PACKAGE_NAME}.txt" + + +## START commit changes ############################################# +echo "Committing changes locally" +${GIT} add . +${GIT} commit -am "migration(${MONOREPO_PACKAGE_NAME}): adjust metadata and automation configs" +## END commit changes + +popd >& /dev/null # "${PATH_MONOREPO}" + +[[ -z ${MESSAGE} ]] && echo "Done." || echo -e "Done, with a message:\n\n${MESSAGE}\n" + + +